From d0d5690f13b9c794044bfe6bd7ac87557dd3dcea Mon Sep 17 00:00:00 2001
From: "michal.banka"
Date: Wed, 10 Jul 2019 16:14:27 +0200
Subject: [PATCH] Fix checkstyle violations in sdc/jtosca

The number of checkstyle violations has decreased from about 8200 to 450.

Change-Id: I31f763d7f51fa66958aab68d094280189c612417
Issue-ID: SDC-2434
Signed-off-by: michal.banka
---
 .../sdc/toscaparser/api/CapabilityAssignment.java | 182 +--
 .../sdc/toscaparser/api/CapabilityAssignments.java | 13 +-
 .../org/onap/sdc/toscaparser/api/DataEntity.java | 368 +++--
 .../onap/sdc/toscaparser/api/EntityTemplate.java | 862 ++++++-----
 .../java/org/onap/sdc/toscaparser/api/Group.java | 177 +--
 .../onap/sdc/toscaparser/api/ImportsLoader.java | 709 +++++----
 .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 943 ++++++------
 .../java/org/onap/sdc/toscaparser/api/Policy.java | 250 ++--
 .../org/onap/sdc/toscaparser/api/Property.java | 369 +++--
 .../sdc/toscaparser/api/RelationshipTemplate.java | 228 +--
 .../org/onap/sdc/toscaparser/api/Repository.java | 124 +-
 .../sdc/toscaparser/api/RequirementAssignment.java | 12 +-
 .../toscaparser/api/RequirementAssignments.java | 6 +-
 .../sdc/toscaparser/api/SubstitutionMappings.java | 440 +++---
 .../onap/sdc/toscaparser/api/TopologyTemplate.java | 900 ++++++------
 .../org/onap/sdc/toscaparser/api/ToscaGraph.java | 114 +-
 .../onap/sdc/toscaparser/api/ToscaTemplate.java | 1552 ++++++++++----------
 .../org/onap/sdc/toscaparser/api/Triggers.java | 232 ++-
 .../onap/sdc/toscaparser/api/UnsupportedType.java | 29 +-
 .../toscaparser/api/common/JToscaException.java | 40 +-
 .../api/common/JToscaValidationIssue.java | 52 +-
 .../sdc/toscaparser/api/common/TOSCAException.java | 67 +-
 .../api/common/ValidationIssueCollector.java | 16 +-
 .../toscaparser/api/elements/ArtifactTypeDef.java | 102 +-
 .../sdc/toscaparser/api/elements/AttributeDef.java | 24 +-
 .../api/elements/CapabilityTypeDef.java | 228 ++-
 .../sdc/toscaparser/api/elements/DataType.java | 88 +-
 .../sdc/toscaparser/api/elements/EntityType.java | 354 +++--
 .../sdc/toscaparser/api/elements/GroupType.java | 263 ++--
 .../toscaparser/api/elements/InterfacesDef.java | 322 ++--
 .../sdc/toscaparser/api/elements/Metadata.java | 59 +-
 .../sdc/toscaparser/api/elements/NodeType.java | 513 ++++---
 .../sdc/toscaparser/api/elements/PolicyType.java | 286 ++--
 .../sdc/toscaparser/api/elements/PortSpec.java | 55 +-
 .../sdc/toscaparser/api/elements/PropertyDef.java | 206 ++-
 .../toscaparser/api/elements/RelationshipType.java | 92 +-
 .../sdc/toscaparser/api/elements/ScalarUnit.java | 281 ++--
 .../api/elements/ScalarUnitFrequency.java | 25 +-
 .../toscaparser/api/elements/ScalarUnitSize.java | 36 +-
 .../toscaparser/api/elements/ScalarUnitTime.java | 26 +-
 .../api/elements/StatefulEntityType.java | 216 ++-
 .../toscaparser/api/elements/TypeValidation.java | 126 +-
 .../api/elements/constraints/Constraint.java | 336 +++--
 .../api/elements/constraints/Equal.java | 56 +-
 .../api/elements/constraints/GreaterOrEqual.java | 126 +-
 .../api/elements/constraints/GreaterThan.java | 109 +-
 .../api/elements/constraints/InRange.java | 139 +-
 .../api/elements/constraints/Length.java | 71 +-
 .../api/elements/constraints/LessOrEqual.java | 117 +-
 .../api/elements/constraints/LessThan.java | 107 +-
 .../api/elements/constraints/MaxLength.java | 93 +-
 .../api/elements/constraints/MinLength.java | 108 +-
 .../api/elements/constraints/Pattern.java | 105 +-
 .../api/elements/constraints/Schema.java | 249 ++--
 .../api/elements/constraints/ValidValues.java | 65 +-
 .../toscaparser/api/elements/enums/FileSize.java | 32 +
 .../api/elements/enums/ToscaElementNames.java | 34 +-
 .../sdc/toscaparser/api/extensions/ExtTools.java | 188 ++-
 .../onap/sdc/toscaparser/api/functions/Concat.java | 40 +-
 .../sdc/toscaparser/api/functions/Function.java | 318 ++--
 .../toscaparser/api/functions/GetAttribute.java | 534 +++---
 .../sdc/toscaparser/api/functions/GetInput.java | 222 ++-
 .../api/functions/GetOperationOutput.java | 246 ++--
 .../sdc/toscaparser/api/functions/GetProperty.java | 663 +++++---
 .../onap/sdc/toscaparser/api/functions/Token.java | 59 +-
 .../sdc/toscaparser/api/parameters/Annotation.java | 140 +-
 .../onap/sdc/toscaparser/api/parameters/Input.java | 315 ++--
 .../sdc/toscaparser/api/parameters/Output.java | 135 +-
 .../org/onap/sdc/toscaparser/api/prereq/CSAR.java | 765 +++++----
 .../onap/sdc/toscaparser/api/utils/CopyUtils.java | 41 +-
 .../onap/sdc/toscaparser/api/utils/DumpUtils.java | 91 +-
 .../toscaparser/api/utils/JToscaErrorCodes.java | 12 +-
 .../api/utils/TOSCAVersionProperty.java | 157 +-
 .../toscaparser/api/utils/ThreadLocalsHolder.java | 17 +-
 .../onap/sdc/toscaparser/api/utils/UrlUtils.java | 88 +-
 .../sdc/toscaparser/api/utils/ValidateUtils.java | 502 ++++---
 .../sdc/toscaparser/api/GetValidationIssues.java | 13 +-
 .../onap/sdc/toscaparser/api/JToscaImportTest.java | 478 +++---
 .../sdc/toscaparser/api/JToscaMetadataParse.java | 26 +-
 .../api/elements/CalculatePropertyByPathTest.java | 4 +-
 .../toscaparser/api/elements/EntityTypeTest.java | 72 +-
 .../toscaparser/api/functions/GetInputTest.java | 44 +-
 82 files changed, 8921 insertions(+), 8983 deletions(-)
 create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java

diff --git a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java
index 126c858..bb7b47d 100644
--- a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java
+++ b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -28,97 +28,103 @@ import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; import org.onap.sdc.toscaparser.api.elements.PropertyDef; public class CapabilityAssignment { - - private String name; - private LinkedHashMap _properties; - private CapabilityTypeDef _definition; - private LinkedHashMap _customDef; - - public CapabilityAssignment(String cname, - LinkedHashMap cproperties, - CapabilityTypeDef cdefinition, LinkedHashMap customDef) { - name = cname; - _properties = cproperties; - _definition = cdefinition; - _customDef = customDef; - } - - /** - * Get the properties list for capability - * @return list of property objects for capability - */ - public ArrayList getPropertiesObjects() { - // Return a list of property objects - ArrayList properties = new ArrayList(); - LinkedHashMap props = _properties; - if(props != null) { - for(Map.Entry me: props.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - - LinkedHashMap propsDef = _definition.getPropertiesDef(); - if(propsDef != null) { - PropertyDef pd = (PropertyDef)propsDef.get(pname); - if(pd != null) { - properties.add(new Property(pname,pvalue,pd.getSchema(), _customDef)); - } - } - } - } - return properties; - } - - /** - * Get the map of properties - * @return map of all properties contains dictionary of property name and property object - */ - public LinkedHashMap getProperties() { + + private String name; + private LinkedHashMap _properties; + private CapabilityTypeDef _definition; + private LinkedHashMap _customDef; + + public CapabilityAssignment(String cname, + LinkedHashMap cproperties, + CapabilityTypeDef cdefinition, LinkedHashMap customDef) { + name = cname; + _properties = cproperties; + _definition = cdefinition; + _customDef = customDef; + } + + /** + * Get the properties list for capability + * + * @return list of property objects for capability + */ + public ArrayList getPropertiesObjects() { + // Return a list of property objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = _properties; + if (props != null) { + for (Map.Entry me : props.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + + LinkedHashMap propsDef = _definition.getPropertiesDef(); + if (propsDef != null) { + PropertyDef pd = (PropertyDef) propsDef.get(pname); + if (pd != null) { + properties.add(new Property(pname, pvalue, pd.getSchema(), _customDef)); + } + } + } + } + return properties; + } + + /** + * Get the map of properties + * + * @return map of all properties contains dictionary of property name and property object + */ + public LinkedHashMap getProperties() { // Return a dictionary of property name-object pairs - LinkedHashMap npps = new LinkedHashMap<>(); - for(Property p: getPropertiesObjects()) { - npps.put(p.getName(),p); - } - return npps; - } - - /** - * Get the property value by name - * @param pname - the property name for capability - * @return the property value for this name - */ - public Object getPropertyValue(String pname) { + LinkedHashMap npps = new LinkedHashMap<>(); + for (Property p : getPropertiesObjects()) { + npps.put(p.getName(), p); + } + return npps; + } + + /** + * Get the property value by name + * + * @param pname - the property name for capability + * @return the property value for this name + */ + public Object getPropertyValue(String pname) { // Return the value of a given property name - LinkedHashMap props = getProperties(); - if(props != null && props.get(pname) != null) { + LinkedHashMap props = getProperties(); + if (props != 
null && props.get(pname) != null) { return props.get(name).getValue(); } return null; - } - - /** - * Get the name for capability - * @return the name for capability - */ - public String getName() { - return name; - } - - /** - * Get the definition for capability - * @return CapabilityTypeDef - contain definition for capability - */ - public CapabilityTypeDef getDefinition() { - return _definition; - } - - /** - * Set the property for capability - * @param pname - the property name for capability to set - * @param pvalue - the property valiue for capability to set - */ - public void setProperty(String pname,Object pvalue) { - _properties.put(pname,pvalue); - } + } + + /** + * Get the name for capability + * + * @return the name for capability + */ + public String getName() { + return name; + } + + /** + * Get the definition for capability + * + * @return CapabilityTypeDef - contain definition for capability + */ + public CapabilityTypeDef getDefinition() { + return _definition; + } + + /** + * Set the property for capability + * + * @param pname - the property name for capability to set + * @param pvalue - the property valiue for capability to set + */ + public void setProperty(String pname, Object pvalue) { + _properties.put(pname, pvalue); + } @Override public String toString() { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java index b960e77..28ada96 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,15 +28,16 @@ import java.util.stream.Collectors; public class CapabilityAssignments { - private Map capabilityAssignments; + private Map capabilityAssignments; - public CapabilityAssignments(Map capabilityAssignments) { + public CapabilityAssignments(Map capabilityAssignments) { this.capabilityAssignments = capabilityAssignments != null ? new HashMap<>(capabilityAssignments) : new HashMap<>(); } /** * Get all capability assignments for node template.
      * This object can be either the original one, holding all capability assignments for this node template,or a filtered one, holding a filtered subset.
+     *
      * @return list of capability assignments for the node template.
      * If there are no capability assignments, empty list is returned.
      */
@@ -46,12 +47,13 @@ public class CapabilityAssignments {
     /**
      * Filter capability assignments by capability tosca type.
+     *
      * @param type - The tosca type of capability assignments.
      * @return CapabilityAssignments object, containing capability assignments of this type.
* If no such found, filtering will result in an empty collection. */ public CapabilityAssignments getCapabilitiesByType(String type) { - Map capabilityAssignmentsMap = capabilityAssignments.entrySet().stream() + Map capabilityAssignmentsMap = capabilityAssignments.entrySet().stream() .filter(cap -> cap.getValue().getDefinition().getType().equals(type)).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); return new CapabilityAssignments(capabilityAssignmentsMap); @@ -59,6 +61,7 @@ public class CapabilityAssignments { /** * Get capability assignment by capability name. + * * @param name - The name of capability assignment * @return capability assignment with this name, or null if no such capability assignment was found. */ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java index 75802a3..e95fe72 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,12 +20,13 @@ package org.onap.sdc.toscaparser.api; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; - import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.*; +import org.onap.sdc.toscaparser.api.elements.DataType; +import org.onap.sdc.toscaparser.api.elements.PortSpec; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.ScalarUnitFrequency; +import org.onap.sdc.toscaparser.api.elements.ScalarUnitSize; +import org.onap.sdc.toscaparser.api.elements.ScalarUnitTime; import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; import org.onap.sdc.toscaparser.api.elements.constraints.Schema; import org.onap.sdc.toscaparser.api.functions.Function; @@ -33,132 +34,134 @@ import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.onap.sdc.toscaparser.api.utils.ValidateUtils; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + public class DataEntity { // A complex data value entity - - private LinkedHashMap customDef; - private DataType dataType; - private LinkedHashMap schema; - private Object value; - private String propertyName; - - public DataEntity(String _dataTypeName,Object _valueDict, - LinkedHashMap _customDef,String _propName) { - + + private LinkedHashMap customDef; + private DataType dataType; + private LinkedHashMap schema; + private Object value; + private String propertyName; + + public DataEntity(String _dataTypeName, Object _valueDict, + LinkedHashMap _customDef, String _propName) { + customDef = _customDef; - dataType = new DataType(_dataTypeName,_customDef); + dataType = new DataType(_dataTypeName, _customDef); schema = dataType.getAllProperties(); value = _valueDict; propertyName = _propName; - } - - @SuppressWarnings("unchecked") - public Object validate() { - // Validate the value by the definition of the datatype + } + + 
@SuppressWarnings("unchecked") + public Object validate() { + // Validate the value by the definition of the datatype // A datatype can not have both 'type' and 'properties' definitions. // If the datatype has 'type' definition - if(dataType.getValueType() != null) { - value = DataEntity.validateDatatype(dataType.getValueType(),value,null,customDef,null); - Schema schemaCls = new Schema(propertyName,dataType.getDefs()); - for(Constraint constraint: schemaCls.getConstraints()) { + if (dataType.getValueType() != null) { + value = DataEntity.validateDatatype(dataType.getValueType(), value, null, customDef, null); + Schema schemaCls = new Schema(propertyName, dataType.getDefs()); + for (Constraint constraint : schemaCls.getConstraints()) { constraint.validate(value); } } // If the datatype has 'properties' definition else { - if(!(value instanceof LinkedHashMap)) { - //ERROR under investigation - String checkedVal = value != null ? value.toString() : null; + if (!(value instanceof LinkedHashMap)) { + //ERROR under investigation + String checkedVal = value != null ? value.toString() : null; - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( - "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", - checkedVal, dataType.getType()))); - - if (value instanceof List && ((List) value).size() > 0) { - value = ((List) value).get(0); - } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( + "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", + checkedVal, dataType.getType()))); - if (!(value instanceof LinkedHashMap)) { - return value; - } - } + if (value instanceof List && ((List) value).size() > 0) { + value = ((List) value).get(0); + } + if (!(value instanceof LinkedHashMap)) { + return value; + } + } - LinkedHashMap valueDict = (LinkedHashMap)value; + LinkedHashMap valueDict = (LinkedHashMap) value; ArrayList allowedProps = new ArrayList<>(); ArrayList requiredProps = new ArrayList<>(); - LinkedHashMap defaultProps = new LinkedHashMap<>(); - if(schema != null) { - allowedProps.addAll(schema.keySet()); - for(String name: schema.keySet()) { - PropertyDef propDef = schema.get(name); - if(propDef.isRequired()) { - requiredProps.add(name); - } - if(propDef.getDefault() != null) { - defaultProps.put(name,propDef.getDefault()); - } - } + LinkedHashMap defaultProps = new LinkedHashMap<>(); + if (schema != null) { + allowedProps.addAll(schema.keySet()); + for (String name : schema.keySet()) { + PropertyDef propDef = schema.get(name); + if (propDef.isRequired()) { + requiredProps.add(name); + } + if (propDef.getDefault() != null) { + defaultProps.put(name, propDef.getDefault()); + } + } } - + // check allowed field - for(String valueKey: valueDict.keySet()) { - //1710 devlop JSON validation - if(!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { + for (String valueKey : valueDict.keySet()) { + //1710 devlop JSON validation + if (!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE100", String.format( "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", - dataType.getType(),valueKey))); - } + dataType.getType(), valueKey))); + } } // check default field - for(String defKey: defaultProps.keySet()) { - Object defValue = defaultProps.get(defKey); - if(valueDict.get(defKey) == null) { - valueDict.put(defKey, defValue); - } - + 
for (String defKey : defaultProps.keySet()) { + Object defValue = defaultProps.get(defKey); + if (valueDict.get(defKey) == null) { + valueDict.put(defKey, defValue); + } + } - + // check missing field ArrayList missingProp = new ArrayList<>(); - for(String reqKey: requiredProps) { - if(!valueDict.keySet().contains(reqKey)) { + for (String reqKey : requiredProps) { + if (!valueDict.keySet().contains(reqKey)) { missingProp.add(reqKey); } } - if(missingProp.size() > 0) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003",String.format( - "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", - dataType.getType(),missingProp.toString()))); + if (missingProp.size() > 0) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( + "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", + dataType.getType(), missingProp.toString()))); } - + // check every field - for(String vname: valueDict.keySet()) { - Object vvalue = valueDict.get(vname); - LinkedHashMap schemaName = _findSchema(vname); - if(schemaName == null) { - continue; - } - Schema propSchema = new Schema(vname,schemaName); + for (String vname : valueDict.keySet()) { + Object vvalue = valueDict.get(vname); + LinkedHashMap schemaName = _findSchema(vname); + if (schemaName == null) { + continue; + } + Schema propSchema = new Schema(vname, schemaName); // check if field value meets type defined - DataEntity.validateDatatype(propSchema.getType(), - vvalue, - propSchema.getEntrySchema(), - customDef, - null); - + DataEntity.validateDatatype(propSchema.getType(), + vvalue, + propSchema.getEntrySchema(), + customDef, + null); + // check if field value meets constraints defined - if(propSchema.getConstraints() != null) { - for(Constraint constraint: propSchema.getConstraints()) { - if(vvalue instanceof ArrayList) { - for(Object val: (ArrayList)vvalue) { + if (propSchema.getConstraints() != null) { + for (Constraint constraint : propSchema.getConstraints()) { + if (vvalue instanceof ArrayList) { + for (Object val : (ArrayList) vvalue) { constraint.validate(val); } - } - else { + } else { constraint.validate(vvalue); } } @@ -166,134 +169,117 @@ public class DataEntity { } } return value; - } - - private LinkedHashMap _findSchema(String name) { - if(schema != null && schema.get(name) != null) { - return schema.get(name).getSchema(); - } - return null; - } - - public static Object validateDatatype(String type, - Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef, - String propName) { - // Validate value with given type + } + + private LinkedHashMap _findSchema(String name) { + if (schema != null && schema.get(name) != null) { + return schema.get(name).getSchema(); + } + return null; + } + + public static Object validateDatatype(String type, + Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef, + String propName) { + // Validate value with given type // If type is list or map, validate its entry by entry_schema(if defined) // If type is a user-defined complex datatype, custom_def is required. 
- if(Function.isFunction(value)) { - return value; - } - else if (type == null) { - //NOT ANALYZED - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format( - "MissingType: Type is missing for value \"%s\"", - value.toString()))); - return value; - } - else if(type.equals(Schema.STRING)) { + if (Function.isFunction(value)) { + return value; + } else if (type == null) { + //NOT ANALYZED + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format( + "MissingType: Type is missing for value \"%s\"", + value.toString()))); + return value; + } else if (type.equals(Schema.STRING)) { return ValidateUtils.validateString(value); - } - else if(type.equals(Schema.INTEGER)) { + } else if (type.equals(Schema.INTEGER)) { return ValidateUtils.validateInteger(value); - } - else if(type.equals(Schema.FLOAT)) { + } else if (type.equals(Schema.FLOAT)) { return ValidateUtils.validateFloat(value); - } - else if(type.equals(Schema.NUMBER)) { + } else if (type.equals(Schema.NUMBER)) { return ValidateUtils.validateNumeric(value); - } - else if(type.equals(Schema.BOOLEAN)) { + } else if (type.equals(Schema.BOOLEAN)) { return ValidateUtils.validateBoolean(value); - } - else if(type.equals(Schema.RANGE)) { + } else if (type.equals(Schema.RANGE)) { return ValidateUtils.validateRange(value); - } - else if(type.equals(Schema.TIMESTAMP)) { + } else if (type.equals(Schema.TIMESTAMP)) { ValidateUtils.validateTimestamp(value); return value; - } - else if(type.equals(Schema.LIST)) { + } else if (type.equals(Schema.LIST)) { ValidateUtils.validateList(value); - if(entrySchema != null) { - DataEntity.validateEntry(value,entrySchema,customDef); + if (entrySchema != null) { + DataEntity.validateEntry(value, entrySchema, customDef); } return value; - } - else if(type.equals(Schema.SCALAR_UNIT_SIZE)) { + } else if (type.equals(Schema.SCALAR_UNIT_SIZE)) { return (new ScalarUnitSize(value)).validateScalarUnit(); - } - else if(type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { + } else if (type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { return (new ScalarUnitFrequency(value)).validateScalarUnit(); - } - else if(type.equals(Schema.SCALAR_UNIT_TIME)) { + } else if (type.equals(Schema.SCALAR_UNIT_TIME)) { return (new ScalarUnitTime(value)).validateScalarUnit(); - } - else if(type.equals(Schema.VERSION)) { - return (new TOSCAVersionProperty(value)).getVersion(); - } - else if(type.equals(Schema.MAP)) { + } else if (type.equals(Schema.VERSION)) { + return (new TOSCAVersionProperty(value.toString())).getVersion(); + } else if (type.equals(Schema.MAP)) { ValidateUtils.validateMap(value); - if(entrySchema != null) { - DataEntity.validateEntry(value,entrySchema,customDef); + if (entrySchema != null) { + DataEntity.validateEntry(value, entrySchema, customDef); } return value; - } - else if(type.equals(Schema.PORTSPEC)) { + } else if (type.equals(Schema.PORTSPEC)) { // tODO(TBD) bug 1567063, validate source & target as PortDef type // as complex types not just as integers - PortSpec.validateAdditionalReq(value,propName,customDef); - } - else { - DataEntity data = new DataEntity(type,value,customDef,null); + PortSpec.validateAdditionalReq(value, propName, customDef); + } else { + DataEntity data = new DataEntity(type, value, customDef, null); return data.validate(); } - - return value; - } - - @SuppressWarnings("unchecked") - public static Object validateEntry(Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef) { - + + return value; + } + + 
@SuppressWarnings("unchecked") + public static Object validateEntry(Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef) { + // Validate entries for map and list - Schema schema = new Schema(null,entrySchema); + Schema schema = new Schema(null, entrySchema); Object valueob = value; ArrayList valueList = null; - if(valueob instanceof LinkedHashMap) { - valueList = new ArrayList(((LinkedHashMap)valueob).values()); - } - else if(valueob instanceof ArrayList) { - valueList = (ArrayList)valueob; + if (valueob instanceof LinkedHashMap) { + valueList = new ArrayList(((LinkedHashMap) valueob).values()); + } else if (valueob instanceof ArrayList) { + valueList = (ArrayList) valueob; } - if(valueList != null) { - for(Object v: valueList) { - DataEntity.validateDatatype(schema.getType(),v,schema.getEntrySchema(),customDef,null); - if(schema.getConstraints() != null) { - for(Constraint constraint: schema.getConstraints()) { - constraint.validate(v); - } - } - } + if (valueList != null) { + for (Object v : valueList) { + DataEntity.validateDatatype(schema.getType(), v, schema.getEntrySchema(), customDef, null); + if (schema.getConstraints() != null) { + for (Constraint constraint : schema.getConstraints()) { + constraint.validate(v); + } + } + } } - return value; - } - - @Override - public String toString() { - return "DataEntity{" + - "customDef=" + customDef + - ", dataType=" + dataType + - ", schema=" + schema + - ", value=" + value + - ", propertyName='" + propertyName + '\'' + - '}'; - } + return value; + } + + @Override + public String toString() { + return "DataEntity{" + + "customDef=" + customDef + + ", dataType=" + dataType + + ", schema=" + schema + + ", value=" + value + + ", propertyName='" + propertyName + '\'' + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java index b0540be..93bfe2b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -33,111 +33,107 @@ import java.util.Map; public abstract class EntityTemplate { // Base class for TOSCA templates - protected static final String DERIVED_FROM = "derived_from"; - protected static final String PROPERTIES = "properties"; - protected static final String REQUIREMENTS = "requirements"; - protected static final String INTERFACES = "interfaces"; - protected static final String CAPABILITIES = "capabilities"; - protected static final String TYPE = "type"; - protected static final String DESCRIPTION = "description"; - protected static final String DIRECTIVES = "directives"; - protected static final String ATTRIBUTES = "attributes"; - protected static final String ARTIFACTS = "artifacts"; - protected static final String NODE_FILTER = "node_filter"; - protected static final String COPY = "copy"; - - protected static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, REQUIREMENTS,INTERFACES, - CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, - ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY}; - - private static final String NODE = "node"; - private static final String CAPABILITY = "capability"; - private static final String RELATIONSHIP = "relationship"; - private static final String OCCURRENCES = "occurrences"; - - protected static final String REQUIREMENTS_SECTION[] = { - NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER}; - - //# Special key names - private static final String METADATA = "metadata"; - protected static final String SPECIAL_SECTIONS[] = {METADATA}; - - protected String name; - protected LinkedHashMap entityTpl; - protected LinkedHashMap customDef; - protected StatefulEntityType typeDefinition; - private ArrayList _properties; - private ArrayList _interfaces; - private ArrayList _requirements; - private ArrayList _capabilities; - - @Nullable - private NodeTemplate _parentNodeTemplate; - - // dummy constructor for subclasses that don't want super - public EntityTemplate() { - return; - } + protected static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + protected static final String TYPE = "type"; + protected static final String DESCRIPTION = "description"; + protected static final String DIRECTIVES = "directives"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String ARTIFACTS = "artifacts"; + protected static final String NODE_FILTER = "node_filter"; + protected static final String COPY = "copy"; + + protected static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, + CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, + ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY}; + + private static final String NODE = "node"; + private static final String CAPABILITY = "capability"; + private static final String RELATIONSHIP = "relationship"; + private static final String OCCURRENCES = "occurrences"; + + protected static final String REQUIREMENTS_SECTION[] = { + NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER}; + + //# Special key names + private static final String METADATA = "metadata"; + protected static final String SPECIAL_SECTIONS[] = {METADATA}; + + protected String name; + protected LinkedHashMap entityTpl; + protected LinkedHashMap customDef; + protected StatefulEntityType typeDefinition; + private ArrayList _properties; + private ArrayList _interfaces; + private 
ArrayList _requirements; + private ArrayList _capabilities; + + @Nullable + private NodeTemplate _parentNodeTemplate; + + // dummy constructor for subclasses that don't want super + public EntityTemplate() { + return; + } public EntityTemplate(String _name, - LinkedHashMap _template, + LinkedHashMap _template, String _entityName, - LinkedHashMap _customDef) { - this(_name, _template, _entityName, _customDef, null); + LinkedHashMap _customDef) { + this(_name, _template, _entityName, _customDef, null); } @SuppressWarnings("unchecked") - public EntityTemplate(String _name, - LinkedHashMap _template, - String _entityName, - LinkedHashMap _customDef, - NodeTemplate parentNodeTemplate) { + public EntityTemplate(String _name, + LinkedHashMap _template, + String _entityName, + LinkedHashMap _customDef, + NodeTemplate parentNodeTemplate) { name = _name; entityTpl = _template; customDef = _customDef; _validateField(entityTpl); - String type = (String)entityTpl.get("type"); - UnsupportedType.validateType(type); - if(_entityName.equals("node_type")) { - if(type != null) { - typeDefinition = new NodeType(type, customDef); - } - else { - typeDefinition = null; - } + String type = (String) entityTpl.get("type"); + UnsupportedType.validateType(type); + if (_entityName.equals("node_type")) { + if (type != null) { + typeDefinition = new NodeType(type, customDef); + } else { + typeDefinition = null; + } } - if(_entityName.equals("relationship_type")) { - Object relationship = _template.get("relationship"); + if (_entityName.equals("relationship_type")) { + Object relationship = _template.get("relationship"); type = null; - if(relationship != null && relationship instanceof LinkedHashMap) { - type = (String)((LinkedHashMap)relationship).get("type"); - } - else if(relationship instanceof String) { - type = (String)entityTpl.get("relationship"); - } - else { - type = (String)entityTpl.get("type"); + if (relationship != null && relationship instanceof LinkedHashMap) { + type = (String) ((LinkedHashMap) relationship).get("type"); + } else if (relationship instanceof String) { + type = (String) entityTpl.get("relationship"); + } else { + type = (String) entityTpl.get("type"); } UnsupportedType.validateType(type); - typeDefinition = new RelationshipType(type,null, customDef); + typeDefinition = new RelationshipType(type, null, customDef); } - if(_entityName.equals("policy_type")) { - if(type == null) { + if (_entityName.equals("policy_type")) { + if (type == null) { //msg = (_('Policy definition of "%(pname)s" must have' // ' a "type" ''attribute.') % dict(pname=name)) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE140", String.format( - "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute",name))); + "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute", name))); } typeDefinition = new PolicyType(type, customDef); } - if(_entityName.equals("group_type")) { - if(type != null) { - typeDefinition = new GroupType(type, customDef); - } - else { + if (_entityName.equals("group_type")) { + if (type != null) { + typeDefinition = new GroupType(type, customDef); + } else { typeDefinition = null; - } + } } _properties = null; _interfaces = null; @@ -146,451 +142,439 @@ public abstract class EntityTemplate { _parentNodeTemplate = parentNodeTemplate; } - public NodeTemplate getParentNodeTemplate() { - return _parentNodeTemplate; - } + public NodeTemplate getParentNodeTemplate() { + return _parentNodeTemplate; + } public String getType() { - 
if(typeDefinition != null) { - String clType = typeDefinition.getClass().getSimpleName(); - if(clType.equals("NodeType")) { - return (String)((NodeType)typeDefinition).getType(); - } - else if(clType.equals("PolicyType")) { - return (String)((PolicyType)typeDefinition).getType(); - } - else if(clType.equals("GroupType")) { - return (String)((GroupType)typeDefinition).getType(); - } - else if(clType.equals("RelationshipType")) { - return (String)((RelationshipType)typeDefinition).getType(); - } - } - return null; + if (typeDefinition != null) { + String clType = typeDefinition.getClass().getSimpleName(); + if (clType.equals("NodeType")) { + return (String) ((NodeType) typeDefinition).getType(); + } else if (clType.equals("PolicyType")) { + return (String) ((PolicyType) typeDefinition).getType(); + } else if (clType.equals("GroupType")) { + return (String) ((GroupType) typeDefinition).getType(); + } else if (clType.equals("RelationshipType")) { + return (String) ((RelationshipType) typeDefinition).getType(); + } + } + return null; } public Object getParentType() { - if(typeDefinition != null) { - String clType = typeDefinition.getClass().getSimpleName(); - if(clType.equals("NodeType")) { - return ((NodeType)typeDefinition).getParentType(); - } - else if(clType.equals("PolicyType")) { - return ((PolicyType)typeDefinition).getParentType(); - } - else if(clType.equals("GroupType")) { - return ((GroupType)typeDefinition).getParentType(); - } - else if(clType.equals("RelationshipType")) { - return ((RelationshipType)typeDefinition).getParentType(); - } - } - return null; + if (typeDefinition != null) { + String clType = typeDefinition.getClass().getSimpleName(); + if (clType.equals("NodeType")) { + return ((NodeType) typeDefinition).getParentType(); + } else if (clType.equals("PolicyType")) { + return ((PolicyType) typeDefinition).getParentType(); + } else if (clType.equals("GroupType")) { + return ((GroupType) typeDefinition).getParentType(); + } else if (clType.equals("RelationshipType")) { + return ((RelationshipType) typeDefinition).getParentType(); + } + } + return null; } - - @SuppressWarnings("unchecked") - public RequirementAssignments getRequirements() { - if(_requirements == null) { - _requirements = _createRequirements(); - } - return new RequirementAssignments(_requirements); + + @SuppressWarnings("unchecked") + public RequirementAssignments getRequirements() { + if (_requirements == null) { + _requirements = _createRequirements(); + } + return new RequirementAssignments(_requirements); } private ArrayList _createRequirements() { - ArrayList reqs = new ArrayList<>(); - ArrayList> requirements = (ArrayList>) - typeDefinition.getValue(REQUIREMENTS,entityTpl,false); - if(requirements == null) { - requirements = new ArrayList<>(); - } - for (Map req: requirements) { - for(String reqName: req.keySet()) { - Object reqItem = req.get(reqName); - if(reqItem instanceof LinkedHashMap) { - Object rel = ((LinkedHashMap)reqItem).get("relationship"); + ArrayList reqs = new ArrayList<>(); + ArrayList> requirements = (ArrayList>) + typeDefinition.getValue(REQUIREMENTS, entityTpl, false); + if (requirements == null) { + requirements = new ArrayList<>(); + } + for (Map req : requirements) { + for (String reqName : req.keySet()) { + Object reqItem = req.get(reqName); + if (reqItem instanceof LinkedHashMap) { + Object rel = ((LinkedHashMap) reqItem).get("relationship"); // LinkedHashMap relationship = rel instanceof LinkedHashMap ? 
(LinkedHashMap) rel : null; - String nodeName = ((LinkedHashMap)reqItem).get("node").toString(); - Object capability = ((LinkedHashMap)reqItem).get("capability"); - String capabilityString = capability != null ? capability.toString() : null; - - reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel)); - } else if (reqItem instanceof String) { //short notation - String nodeName = String.valueOf(reqItem); - reqs.add(new RequirementAssignment(reqName, nodeName)); - } - } - } - return reqs; - } + String nodeName = ((LinkedHashMap) reqItem).get("node").toString(); + Object capability = ((LinkedHashMap) reqItem).get("capability"); + String capabilityString = capability != null ? capability.toString() : null; + + reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel)); + } else if (reqItem instanceof String) { //short notation + String nodeName = String.valueOf(reqItem); + reqs.add(new RequirementAssignment(reqName, nodeName)); + } + } + } + return reqs; + } public ArrayList getPropertiesObjects() { // Return properties objects for this template - if(_properties ==null) { + if (_properties == null) { _properties = _createProperties(); } - return _properties; + return _properties; } - - public LinkedHashMap getProperties() { - LinkedHashMap props = new LinkedHashMap<>(); - for(Property po: getPropertiesObjects()) { - props.put(po.getName(),po); - } - return props; + + public LinkedHashMap getProperties() { + LinkedHashMap props = new LinkedHashMap<>(); + for (Property po : getPropertiesObjects()) { + props.put(po.getName(), po); + } + return props; } - + public Object getPropertyValue(String name) { - LinkedHashMap props = getProperties(); - Property p = props.get(name); - return p != null ? p.getValue() : null; - } + LinkedHashMap props = getProperties(); + Property p = props.get(name); + return p != null ? p.getValue() : null; + } - public String getPropertyType(String name) { - Property property = getProperties().get(name); + public String getPropertyType(String name) { + Property property = getProperties().get(name); if (property != null) { return property.getType(); } return null; - } + } public ArrayList getInterfaces() { - if(_interfaces == null) { - _interfaces = _createInterfaces(); - } - return _interfaces; + if (_interfaces == null) { + _interfaces = _createInterfaces(); + } + return _interfaces; } - + public ArrayList getCapabilitiesObjects() { // Return capabilities objects for this template - if(_capabilities == null) { - _capabilities = _createCapabilities(); - } - return _capabilities; - + if (_capabilities == null) { + _capabilities = _createCapabilities(); + } + return _capabilities; + } - + public CapabilityAssignments getCapabilities() { - LinkedHashMap caps = new LinkedHashMap(); - for(CapabilityAssignment cap: getCapabilitiesObjects()) { - caps.put(cap.getName(),cap); - } - return new CapabilityAssignments(caps); + LinkedHashMap caps = new LinkedHashMap(); + for (CapabilityAssignment cap : getCapabilitiesObjects()) { + caps.put(cap.getName(), cap); + } + return new CapabilityAssignments(caps); } public boolean isDerivedFrom(String typeStr) { - // Returns true if this object is derived from 'type_str'. + // Returns true if this object is derived from 'type_str'. 
// False otherwise - - if(getType() == null) { - return false; - } - else if(getType().equals(typeStr)) { - return true; - } - else if(getParentType() != null) { - return ((EntityType)getParentType()).isDerivedFrom(typeStr); - } - return false; + + if (getType() == null) { + return false; + } else if (getType().equals(typeStr)) { + return true; + } else if (getParentType() != null) { + return ((EntityType) getParentType()).isDerivedFrom(typeStr); + } + return false; } - + @SuppressWarnings("unchecked") - private ArrayList _createCapabilities() { - ArrayList capability = new ArrayList(); - LinkedHashMap caps = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(CAPABILITIES,entityTpl,true); - if(caps != null) { - //?!? getCapabilities defined only for NodeType... - LinkedHashMap capabilities = null; - if(typeDefinition instanceof NodeType){ - capabilities = ((NodeType)typeDefinition).getCapabilities(); - } else if (typeDefinition instanceof GroupType){ - capabilities = ((GroupType)typeDefinition).getCapabilities(); - } - for(Map.Entry me: caps.entrySet()) { - String name = me. getKey(); - LinkedHashMap props = (LinkedHashMap)me.getValue(); - if(capabilities.get(name) != null) { - CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef - LinkedHashMap properties = new LinkedHashMap(); - // first use the definition default value - LinkedHashMap cprops = c.getProperties(); - if(cprops != null) { - for(Map.Entry cpe: cprops.entrySet()) { - String propertyName = cpe.getKey(); - LinkedHashMap propertyDef = (LinkedHashMap)cpe.getValue(); - Object dob = propertyDef.get("default"); - if(dob != null) { - properties.put(propertyName, dob); - - } - } - } + private ArrayList _createCapabilities() { + ArrayList capability = new ArrayList(); + LinkedHashMap caps = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, true); + if (caps != null) { + //?!? getCapabilities defined only for NodeType... 
+ LinkedHashMap capabilities = null; + if (typeDefinition instanceof NodeType) { + capabilities = ((NodeType) typeDefinition).getCapabilities(); + } else if (typeDefinition instanceof GroupType) { + capabilities = ((GroupType) typeDefinition).getCapabilities(); + } + for (Map.Entry me : caps.entrySet()) { + String name = me.getKey(); + LinkedHashMap props = (LinkedHashMap) me.getValue(); + if (capabilities.get(name) != null) { + CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef + LinkedHashMap properties = new LinkedHashMap(); + // first use the definition default value + LinkedHashMap cprops = c.getProperties(); + if (cprops != null) { + for (Map.Entry cpe : cprops.entrySet()) { + String propertyName = cpe.getKey(); + LinkedHashMap propertyDef = (LinkedHashMap) cpe.getValue(); + Object dob = propertyDef.get("default"); + if (dob != null) { + properties.put(propertyName, dob); + + } + } + } // then update (if available) with the node properties - LinkedHashMap pp = (LinkedHashMap)props.get("properties"); - if(pp != null) { - properties.putAll(pp); - } + LinkedHashMap pp = (LinkedHashMap) props.get("properties"); + if (pp != null) { + properties.putAll(pp); + } CapabilityAssignment cap = new CapabilityAssignment(name, properties, c, customDef); capability.add(cap); - } - } - } - return capability; - } - - protected void _validateProperties(LinkedHashMap template,StatefulEntityType entityType) { - @SuppressWarnings("unchecked") - LinkedHashMap properties = (LinkedHashMap)entityType.getValue(PROPERTIES,template,false); - _commonValidateProperties(entityType,properties); + } + } + } + return capability; + } + + protected void _validateProperties(LinkedHashMap template, StatefulEntityType entityType) { + @SuppressWarnings("unchecked") + LinkedHashMap properties = (LinkedHashMap) entityType.getValue(PROPERTIES, template, false); + _commonValidateProperties(entityType, properties); } protected void _validateCapabilities() { - //BUG??? getCapabilities only defined in NodeType... - LinkedHashMap typeCapabilities = ((NodeType)typeDefinition).getCapabilities(); - ArrayList allowedCaps = new ArrayList(); - if(typeCapabilities != null) { - allowedCaps.addAll(typeCapabilities.keySet()); - } - @SuppressWarnings("unchecked") - LinkedHashMap capabilities = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(CAPABILITIES, entityTpl, false); - if(capabilities != null) { + //BUG??? getCapabilities only defined in NodeType... 
+ LinkedHashMap typeCapabilities = ((NodeType) typeDefinition).getCapabilities(); + ArrayList allowedCaps = new ArrayList(); + if (typeCapabilities != null) { + allowedCaps.addAll(typeCapabilities.keySet()); + } + @SuppressWarnings("unchecked") + LinkedHashMap capabilities = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, false); + if (capabilities != null) { _commonValidateField(capabilities, allowedCaps, "capabilities"); _validateCapabilitiesProperties(capabilities); - } + } } - - @SuppressWarnings("unchecked") - private void _validateCapabilitiesProperties(LinkedHashMap capabilities) { - for(Map.Entry me: capabilities.entrySet()) { - String cap = me.getKey(); - LinkedHashMap props = (LinkedHashMap)me.getValue(); - CapabilityAssignment capability = getCapability(cap); - if(capability == null) { - continue; - } - CapabilityTypeDef capabilitydef = capability.getDefinition(); - _commonValidateProperties(capabilitydef,(LinkedHashMap)props.get(PROPERTIES)); - + + @SuppressWarnings("unchecked") + private void _validateCapabilitiesProperties(LinkedHashMap capabilities) { + for (Map.Entry me : capabilities.entrySet()) { + String cap = me.getKey(); + LinkedHashMap props = (LinkedHashMap) me.getValue(); + CapabilityAssignment capability = getCapability(cap); + if (capability == null) { + continue; + } + CapabilityTypeDef capabilitydef = capability.getDefinition(); + _commonValidateProperties(capabilitydef, (LinkedHashMap) props.get(PROPERTIES)); + // validating capability properties values - for(Property prop: getCapability(cap).getPropertiesObjects()) { + for (Property prop : getCapability(cap).getPropertiesObjects()) { prop.validate(); - - if(cap.equals("scalable") && prop.getName().equals("default_instances")) { - LinkedHashMap propDict = (LinkedHashMap)props.get(PROPERTIES); - int minInstances = (int)propDict.get("min_instances"); - int maxInstances = (int)propDict.get("max_instances"); - int defaultInstances = (int)propDict.get("default_instances"); - if(defaultInstances < minInstances || defaultInstances > maxInstances) { + + if (cap.equals("scalable") && prop.getName().equals("default_instances")) { + LinkedHashMap propDict = (LinkedHashMap) props.get(PROPERTIES); + int minInstances = (int) propDict.get("min_instances"); + int maxInstances = (int) propDict.get("max_instances"); + int defaultInstances = (int) propDict.get("default_instances"); + if (defaultInstances < minInstances || defaultInstances > maxInstances) { //err_msg = ('"properties" of template "%s": ' // '"default_instances" value is not between ' // '"min_instances" and "max_instances".' 
% // self.name) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE141", String.format( - "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", - name))); + "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", + name))); } } - } - } + } + } } - private void _commonValidateProperties(StatefulEntityType entityType,LinkedHashMap properties) { - ArrayList allowedProps = new ArrayList(); - ArrayList requiredProps = new ArrayList(); - for(PropertyDef p: entityType.getPropertiesDefObjects()) { - allowedProps.add(p.getName()); - // If property is 'required' and has no 'default' value then record - if(p.isRequired() && p.getDefault() == null) { - requiredProps.add(p.getName()); - } - } + private void _commonValidateProperties(StatefulEntityType entityType, LinkedHashMap properties) { + ArrayList allowedProps = new ArrayList(); + ArrayList requiredProps = new ArrayList(); + for (PropertyDef p : entityType.getPropertiesDefObjects()) { + allowedProps.add(p.getName()); + // If property is 'required' and has no 'default' value then record + if (p.isRequired() && p.getDefault() == null) { + requiredProps.add(p.getName()); + } + } // validate all required properties have values - if(properties != null) { + if (properties != null) { ArrayList reqPropsNoValueOrDefault = new ArrayList(); _commonValidateField(properties, allowedProps, "properties"); // make sure it's not missing any property required by a tosca type - for(String r: requiredProps) { - if(properties.get(r) == null) { - reqPropsNoValueOrDefault.add(r); - } + for (String r : requiredProps) { + if (properties.get(r) == null) { + reqPropsNoValueOrDefault.add(r); + } } // Required properties found without value or a default value - if(!reqPropsNoValueOrDefault.isEmpty()) { + if (!reqPropsNoValueOrDefault.isEmpty()) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( - "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", - name,reqPropsNoValueOrDefault.toString()))); + "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", + name, reqPropsNoValueOrDefault.toString()))); } - } - else { + } else { // Required properties in schema, but not in template - if(!requiredProps.isEmpty()) { + if (!requiredProps.isEmpty()) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE004", String.format( "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s", - name,requiredProps.toString()))); + name, requiredProps.toString()))); } } } - + @SuppressWarnings("unchecked") - private void _validateField(LinkedHashMap template) { - if(!(template instanceof LinkedHashMap)) { + private void _validateField(LinkedHashMap template) { + if (!(template instanceof LinkedHashMap)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE142", String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE))); return;//??? 
} boolean bBad = false; - Object relationship = ((LinkedHashMap)template).get("relationship"); - if(relationship != null) { - if(!(relationship instanceof String)) { - bBad = (((LinkedHashMap)relationship).get(TYPE) == null); - } - else if(relationship instanceof String) { - bBad = (template.get("relationship") == null); - } - } - else { - bBad = (template.get(TYPE) == null); + Object relationship = ((LinkedHashMap) template).get("relationship"); + if (relationship != null) { + if (!(relationship instanceof String)) { + bBad = (((LinkedHashMap) relationship).get(TYPE) == null); + } else if (relationship instanceof String) { + bBad = (template.get("relationship") == null); + } + } else { + bBad = (template.get(TYPE) == null); } - if(bBad) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); + if (bBad) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE))); } } - - protected void _commonValidateField(LinkedHashMap schema, ArrayList allowedList,String section) { - for(String sname: schema.keySet()) { - boolean bFound = false; - for(String allowed: allowedList) { - if(sname.equals(allowed)) { - bFound = true; - break; - } - } - if(!bFound) { + + protected void _commonValidateField(LinkedHashMap schema, ArrayList allowedList, String section) { + for (String sname : schema.keySet()) { + boolean bFound = false; + for (String allowed : allowedList) { + if (sname.equals(allowed)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE144", String.format( - "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"",section,name,sname))); - } - } - + "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"", section, name, sname))); + } + } + } - + @SuppressWarnings("unchecked") - private ArrayList _createProperties() { - ArrayList props = new ArrayList(); - LinkedHashMap properties = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(PROPERTIES,entityTpl,false); - if(properties == null) { - properties = new LinkedHashMap(); - } - for(Map.Entry me: properties.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - LinkedHashMap propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef(); - if(propsDef != null && propsDef.get(pname) != null) { - PropertyDef pd = (PropertyDef)propsDef.get(pname); - Property prop = new Property(pname,pvalue,pd.getSchema(),customDef); - props.add(prop); - } - } - ArrayList pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects(); - for(Object pdo: pds) { - PropertyDef pd = (PropertyDef)pdo; - if(pd.getDefault() != null && properties.get(pd.getName()) == null) { - Property prop = new Property(pd.getName(),pd.getDefault(),pd.getSchema(),customDef); - props.add(prop); - } - } - return props; + private ArrayList _createProperties() { + ArrayList props = new ArrayList(); + LinkedHashMap properties = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(PROPERTIES, entityTpl, false); + if (properties == null) { + properties = new LinkedHashMap(); + } + for (Map.Entry me : properties.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + LinkedHashMap propsDef = 
((StatefulEntityType) typeDefinition).getPropertiesDef(); + if (propsDef != null && propsDef.get(pname) != null) { + PropertyDef pd = (PropertyDef) propsDef.get(pname); + Property prop = new Property(pname, pvalue, pd.getSchema(), customDef); + props.add(prop); + } + } + ArrayList pds = ((StatefulEntityType) typeDefinition).getPropertiesDefObjects(); + for (Object pdo : pds) { + PropertyDef pd = (PropertyDef) pdo; + if (pd.getDefault() != null && properties.get(pd.getName()) == null) { + Property prop = new Property(pd.getName(), pd.getDefault(), pd.getSchema(), customDef); + props.add(prop); + } + } + return props; } @SuppressWarnings("unchecked") - private ArrayList _createInterfaces() { - ArrayList interfaces = new ArrayList<>(); - LinkedHashMap typeInterfaces = new LinkedHashMap(); - if(typeDefinition instanceof RelationshipType) { - if(entityTpl instanceof LinkedHashMap) { - typeInterfaces = (LinkedHashMap)entityTpl.get(INTERFACES); - if(typeInterfaces == null) { - for(String relName: entityTpl.keySet()) { - Object relValue = entityTpl.get(relName); - if(!relName.equals("type")) { - Object relDef = relValue; - LinkedHashMap rel = null; - if(relDef instanceof LinkedHashMap) { - Object relob = ((LinkedHashMap)relDef).get("relationship"); - if(relob instanceof LinkedHashMap) { - rel = (LinkedHashMap)relob; - } - } - if(rel != null) { - if(rel.get(INTERFACES) != null) { - typeInterfaces = (LinkedHashMap)rel.get(INTERFACES); - break; - } - } - } - } - } - } - } - else { - typeInterfaces = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(INTERFACES,entityTpl,false); - } - if(typeInterfaces != null) { - for(Map.Entry me: typeInterfaces.entrySet()) { - String interfaceType = me.getKey(); - LinkedHashMap value = (LinkedHashMap)me.getValue(); - for(Map.Entry ve: value.entrySet()) { - String op = ve.getKey(); - Object opDef = ve.getValue(); - InterfacesDef iface = new InterfacesDef((EntityType)typeDefinition, - interfaceType, - this, - op, - opDef); - interfaces.add(iface); - } - - } - } - return interfaces; + private ArrayList _createInterfaces() { + ArrayList interfaces = new ArrayList<>(); + LinkedHashMap typeInterfaces = new LinkedHashMap(); + if (typeDefinition instanceof RelationshipType) { + if (entityTpl instanceof LinkedHashMap) { + typeInterfaces = (LinkedHashMap) entityTpl.get(INTERFACES); + if (typeInterfaces == null) { + for (String relName : entityTpl.keySet()) { + Object relValue = entityTpl.get(relName); + if (!relName.equals("type")) { + Object relDef = relValue; + LinkedHashMap rel = null; + if (relDef instanceof LinkedHashMap) { + Object relob = ((LinkedHashMap) relDef).get("relationship"); + if (relob instanceof LinkedHashMap) { + rel = (LinkedHashMap) relob; + } + } + if (rel != null) { + if (rel.get(INTERFACES) != null) { + typeInterfaces = (LinkedHashMap) rel.get(INTERFACES); + break; + } + } + } + } + } + } + } else { + typeInterfaces = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(INTERFACES, entityTpl, false); + } + if (typeInterfaces != null) { + for (Map.Entry me : typeInterfaces.entrySet()) { + String interfaceType = me.getKey(); + LinkedHashMap value = (LinkedHashMap) me.getValue(); + for (Map.Entry ve : value.entrySet()) { + String op = ve.getKey(); + Object opDef = ve.getValue(); + InterfacesDef iface = new InterfacesDef((EntityType) typeDefinition, + interfaceType, + this, + op, + opDef); + interfaces.add(iface); + } + + } + } + return interfaces; } - - public CapabilityAssignment getCapability(String name) { + + public CapabilityAssignment 
getCapability(String name) { // Provide named capability - // :param name: name of capability + // :param name: name of capability // :return: capability object if found, None otherwise - return getCapabilities().getCapabilityByName(name); + return getCapabilities().getCapabilityByName(name); } - - // getter - public String getName() { - return name; + + // getter + public String getName() { + return name; } - + public StatefulEntityType getTypeDefinition() { - return typeDefinition; + return typeDefinition; + } + + public LinkedHashMap getCustomDef() { + return customDef; + } + + @Override + public String toString() { + return "EntityTemplate{" + + "name='" + name + '\'' + + ", entityTpl=" + entityTpl + + ", customDef=" + customDef + + ", typeDefinition=" + typeDefinition + + ", _properties=" + _properties + + ", _interfaces=" + _interfaces + + ", _requirements=" + _requirements + + ", _capabilities=" + _capabilities + + '}'; } - - public LinkedHashMap getCustomDef() { - return customDef; - } - - @Override - public String toString() { - return "EntityTemplate{" + - "name='" + name + '\'' + - ", entityTpl=" + entityTpl + - ", customDef=" + customDef + - ", typeDefinition=" + typeDefinition + - ", _properties=" + _properties + - ", _interfaces=" + _interfaces + - ", _requirements=" + _requirements + - ", _capabilities=" + _capabilities + - '}'; - } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/src/main/java/org/onap/sdc/toscaparser/api/Group.java index 299ba01..0591d9a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Group.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Group.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
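The EntityTemplate._createProperties hunk above only re-indents the code, but the behaviour it preserves is the interesting part: property values assigned in the template are wrapped first, and any property definition that carries a default and has no explicit assignment is appended afterwards. Below is a minimal standalone sketch of that merge order, using a hypothetical PropertyDefinition stand-in rather than the real PropertyDef class.

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative sketch (not part of the patched sources) of the merge order kept by
    // EntityTemplate._createProperties: explicit assignments first, then schema defaults.
    public class PropertyMergeSketch {

        // Hypothetical stand-in for a property definition with an optional default value.
        static class PropertyDefinition {
            final String name;
            final Object defaultValue;

            PropertyDefinition(String name, Object defaultValue) {
                this.name = name;
                this.defaultValue = defaultValue;
            }
        }

        static Map<String, Object> merge(Map<String, Object> assigned, List<PropertyDefinition> definitions) {
            Map<String, Object> result = new LinkedHashMap<>(assigned);
            for (PropertyDefinition def : definitions) {
                // Defaults only fill gaps; explicit assignments always win.
                if (def.defaultValue != null && !result.containsKey(def.name)) {
                    result.put(def.name, def.defaultValue);
                }
            }
            return result;
        }

        public static void main(String[] args) {
            Map<String, Object> assigned = new LinkedHashMap<>();
            assigned.put("port", 8080);

            List<PropertyDefinition> defs = new ArrayList<>();
            defs.add(new PropertyDefinition("port", 80));
            defs.add(new PropertyDefinition("protocol", "http"));

            System.out.println(merge(assigned, defs)); // {port=8080, protocol=http}
        }
    }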
@@ -30,96 +30,97 @@ import java.util.LinkedHashMap; import java.util.Map; public class Group extends EntityTemplate { - - private static final String TYPE = "type"; - private static final String METADATA = "metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String MEMBERS = "members"; - private static final String INTERFACES = "interfaces"; - private static final String SECTIONS[] = { - TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; - - private String name; - LinkedHashMap tpl; - ArrayList memberNodes; - LinkedHashMap customDef; - Metadata metaData; - - - public Group(String _name, LinkedHashMap _templates, - ArrayList _memberNodes, - LinkedHashMap _customDef){ - this(_name, _templates, _memberNodes, _customDef, null); - } - - public Group(String _name, LinkedHashMap _templates, - ArrayList _memberNodes, - LinkedHashMap _customDef, NodeTemplate parentNodeTemplate) { - super(_name, _templates, "group_type", _customDef, parentNodeTemplate); - - name = _name; - tpl = _templates; - if(tpl.get(METADATA) != null) { - Object metadataObject = tpl.get(METADATA); - ValidateUtils.validateMap(metadataObject); - metaData = new Metadata((Map)metadataObject); + + private static final String TYPE = "type"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + private static final String[] SECTIONS = { + TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String name; + private LinkedHashMap tpl; + private ArrayList memberNodes; + private LinkedHashMap customDef; + private Metadata metaData; + + + public Group(String name, LinkedHashMap templates, + ArrayList memberNodes, + LinkedHashMap customDef) { + this(name, templates, memberNodes, customDef, null); + } + + public Group(String name, LinkedHashMap templates, + ArrayList memberNodes, + LinkedHashMap customDef, NodeTemplate parentNodeTemplate) { + super(name, templates, "group_type", customDef, parentNodeTemplate); + + this.name = name; + tpl = templates; + if (tpl.get(METADATA) != null) { + Object metadataObject = tpl.get(METADATA); + ValidateUtils.validateMap(metadataObject); + metaData = new Metadata((Map) metadataObject); } - memberNodes = _memberNodes; - _validateKeys(); + this.memberNodes = memberNodes; + validateKeys(); getCapabilities(); - } - - public Metadata getMetadata() { - return metaData; - } - - public ArrayList getMembers() { - return (ArrayList)entityTpl.get("members"); - } - - public String getDescription() { - return (String)entityTpl.get("description"); - - } - - public ArrayList getMemberNodes() { - return memberNodes; - } - - private void _validateKeys() { - for(String key: entityTpl.keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(key.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { + } + + public Metadata getMetadata() { + return metaData; + } + + public ArrayList getMembers() { + return (ArrayList) entityTpl.get("members"); + } + + public String getDescription() { + return (String) entityTpl.get("description"); + + } + + public ArrayList getMemberNodes() { + return memberNodes; + } + + private void validateKeys() { + for (String key : entityTpl.keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if 
(key.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE183", String.format( "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"", - name,key))); - } - } - } - - @Override - public String toString() { - return "Group{" + - "name='" + name + '\'' + - ", tpl=" + tpl + - ", memberNodes=" + memberNodes + - ", customDef=" + customDef + - ", metaData=" + metaData + - '}'; - } - - public int compareTo(Group other){ - if(this.equals(other)) - return 0; - return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); - } + name, key))); + } + } + } + + @Override + public String toString() { + return "Group{" + + "name='" + name + '\'' + + ", tpl=" + tpl + + ", memberNodes=" + memberNodes + + ", customDef=" + customDef + + ", metaData=" + metaData + + '}'; + } + + public int compareTo(Group other) { + if (this.equals(other)) { + return 0; + } + return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java index 5ef639b..019adb3 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
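The Group hunk above is again formatting-only, but it shows the validation idiom this patch touches in several classes: each template keeps a fixed SECTIONS array of allowed keys and reports any key outside it as an unknown field. A small self-contained sketch of that idiom follows, using the Group section names from the hunk; the class name and the printed output are illustrative only.

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    // Illustrative sketch of the allow-list key check used above by Group.validateKeys,
    // ImportsLoader._validateImportKeys and EntityTemplate._commonValidateField.
    public class KnownKeysSketch {

        private static final List<String> SECTIONS =
                Arrays.asList("type", "metadata", "description", "properties", "members", "interfaces");

        static List<String> unknownKeys(Map<String, Object> template) {
            return template.keySet().stream()
                    .filter(key -> !SECTIONS.contains(key))
                    .collect(Collectors.toList());
        }

        public static void main(String[] args) {
            Map<String, Object> groupTpl = new LinkedHashMap<>();
            groupTpl.put("type", "tosca.groups.Root");
            groupTpl.put("membres", "deliberately misspelled key");

            // Prints [membres]; the real parser appends a JE183 validation issue instead of printing.
            System.out.println(unknownKeys(groupTpl));
        }
    }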
@@ -37,202 +37,197 @@ import java.util.*; public class ImportsLoader { - private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName()); - private static final String FILE = "file"; - private static final String REPOSITORY = "repository"; - private static final String NAMESPACE_URI = "namespace_uri"; - private static final String NAMESPACE_PREFIX = "namespace_prefix"; + private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName()); + private static final String FILE = "file"; + private static final String REPOSITORY = "repository"; + private static final String NAMESPACE_URI = "namespace_uri"; + private static final String NAMESPACE_PREFIX = "namespace_prefix"; private String IMPORTS_SECTION[] = {FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX}; - - private ArrayList importslist; - private String path; - private ArrayList typeDefinitionList; - - private LinkedHashMap customDefs; - private LinkedHashMap allCustomDefs; - private ArrayList> nestedToscaTpls; - private LinkedHashMap repositories; - - @SuppressWarnings("unchecked") - public ImportsLoader(ArrayList_importslist, - String _path, - Object _typeDefinitionList, - LinkedHashMap tpl) { - - this.importslist = _importslist; - customDefs = new LinkedHashMap(); - allCustomDefs = new LinkedHashMap(); - nestedToscaTpls = new ArrayList>(); - if((_path == null || _path.isEmpty()) && tpl == null) { - //msg = _('Input tosca template is not provided.') - //log.warning(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided")); - } - - this.path = _path; - this.repositories = new LinkedHashMap(); - - if(tpl != null && tpl.get("repositories") != null) { - this.repositories = (LinkedHashMap)tpl.get("repositories"); - } - this.typeDefinitionList = new ArrayList(); - if(_typeDefinitionList != null) { - if(_typeDefinitionList instanceof ArrayList) { - this.typeDefinitionList = (ArrayList)_typeDefinitionList; - } - else { - this.typeDefinitionList.add((String)_typeDefinitionList); - } - } - _validateAndLoadImports(); - } - - public LinkedHashMap getCustomDefs() { - return allCustomDefs; - } - - public ArrayList> getNestedToscaTpls() { - return nestedToscaTpls; + + private ArrayList importslist; + private String path; + private ArrayList typeDefinitionList; + + private LinkedHashMap customDefs; + private LinkedHashMap allCustomDefs; + private ArrayList> nestedToscaTpls; + private LinkedHashMap repositories; + + @SuppressWarnings("unchecked") + public ImportsLoader(ArrayList _importslist, + String _path, + Object _typeDefinitionList, + LinkedHashMap tpl) { + + this.importslist = _importslist; + customDefs = new LinkedHashMap(); + allCustomDefs = new LinkedHashMap(); + nestedToscaTpls = new ArrayList>(); + if ((_path == null || _path.isEmpty()) && tpl == null) { + //msg = _('Input tosca template is not provided.') + //log.warning(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided")); + } + + this.path = _path; + this.repositories = new LinkedHashMap(); + + if (tpl != null && tpl.get("repositories") != null) { + this.repositories = (LinkedHashMap) tpl.get("repositories"); + } + this.typeDefinitionList = new ArrayList(); + if (_typeDefinitionList != null) { + if (_typeDefinitionList instanceof ArrayList) { + this.typeDefinitionList = (ArrayList) _typeDefinitionList; + } else { + this.typeDefinitionList.add((String) 
_typeDefinitionList); + } + } + _validateAndLoadImports(); + } + + public LinkedHashMap getCustomDefs() { + return allCustomDefs; } - - @SuppressWarnings({ "unchecked", "unused" }) - public void _validateAndLoadImports() { - Set importNames = new HashSet(); - - if(importslist == null) { + + public ArrayList> getNestedToscaTpls() { + return nestedToscaTpls; + } + + @SuppressWarnings({"unchecked", "unused"}) + public void _validateAndLoadImports() { + Set importNames = new HashSet(); + + if (importslist == null) { //msg = _('"imports" keyname is defined without including templates.') //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185", - "ValidationError: \"imports\" keyname is defined without including templates")); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185", + "ValidationError: \"imports\" keyname is defined without including templates")); return; - } - - for(Object importDef: importslist) { - String fullFileName = null; - LinkedHashMap customType = null; - if(importDef instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)importDef).entrySet()) { - String importName = me.getKey(); - Object importUri = me.getValue(); - if(importNames.contains(importName)) { + } + + for (Object importDef : importslist) { + String fullFileName = null; + LinkedHashMap customType = null; + if (importDef instanceof LinkedHashMap) { + for (Map.Entry me : ((LinkedHashMap) importDef).entrySet()) { + String importName = me.getKey(); + Object importUri = me.getValue(); + if (importNames.contains(importName)) { //msg = (_('Duplicate import name "%s" was found.') % import_name) //log.error(msg) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE186", String.format( - "ValidationError: Duplicate import name \"%s\" was found",importName))); - } - importNames.add(importName); //??? - - // _loadImportTemplate returns 2 objects - Object ffnct[] = _loadImportTemplate(importName, importUri); - fullFileName = (String)ffnct[0]; - customType = (LinkedHashMap)ffnct[1]; - String namespacePrefix = ""; - if(importUri instanceof LinkedHashMap) { - namespacePrefix = (String) - ((LinkedHashMap)importUri).get(NAMESPACE_PREFIX); - } - - if(customType != null) { - TypeValidation tv = new TypeValidation(customType, importDef); + "ValidationError: Duplicate import name \"%s\" was found", importName))); + } + importNames.add(importName); //??? 
+ + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(importName, importUri); + fullFileName = (String) ffnct[0]; + customType = (LinkedHashMap) ffnct[1]; + String namespacePrefix = ""; + if (importUri instanceof LinkedHashMap) { + namespacePrefix = (String) + ((LinkedHashMap) importUri).get(NAMESPACE_PREFIX); + } + + if (customType != null) { + TypeValidation tv = new TypeValidation(customType, importDef); _updateCustomDefs(customType, namespacePrefix); - } - } - } - else { // old style of imports - // _loadImportTemplate returns 2 objects - Object ffnct[] = _loadImportTemplate(null,importDef); - fullFileName = (String)ffnct[0]; - customType = (LinkedHashMap)ffnct[1]; - if(customType != null) { - TypeValidation tv = new TypeValidation(customType,importDef); - _updateCustomDefs(customType,null); + } } - } + } else { // old style of imports + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(null, importDef); + fullFileName = (String) ffnct[0]; + customType = (LinkedHashMap) ffnct[1]; + if (customType != null) { + TypeValidation tv = new TypeValidation(customType, importDef); + _updateCustomDefs(customType, null); + } + } _updateNestedToscaTpls(fullFileName, customType); - - } + + } } - /** - * This method is used to get consolidated custom definitions by passing custom Types from - * each import. The resultant collection is then passed back which contains all import - * definitions - * - * @param customType the custom type - * @param namespacePrefix the namespace prefix - */ - @SuppressWarnings("unchecked") - private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { - LinkedHashMap outerCustomTypes; - for(String typeDef: typeDefinitionList) { - if(typeDef.equals("imports")) { - customDefs.put("imports", customType.get(typeDef)); - if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null){ - allCustomDefs.put("imports",customType.get(typeDef)); - } - else if (customType.get(typeDef) != null){ - Set allCustomImports = new HashSet<>((ArrayList)allCustomDefs.get("imports")); - allCustomImports.addAll((ArrayList) customType.get(typeDef)); - allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); - } - } - else { - outerCustomTypes = (LinkedHashMap)customType.get(typeDef); - if(outerCustomTypes != null) { - if(namespacePrefix != null && !namespacePrefix.isEmpty()) { - LinkedHashMap prefixCustomTypes = new LinkedHashMap(); - for(Map.Entry me: outerCustomTypes.entrySet()) { - String typeDefKey = me.getKey(); - String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey; - prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); - } - customDefs.putAll(prefixCustomTypes); - allCustomDefs.putAll(prefixCustomTypes); - } - else { - customDefs.putAll(outerCustomTypes); - allCustomDefs.putAll(outerCustomTypes); - } - } - } - } - } - - private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap customTpl) { - if(fullFileName != null && customTpl != null) { - LinkedHashMap tt = new LinkedHashMap(); - tt.put(fullFileName, customTpl); - nestedToscaTpls.add(tt); - } + /** + * This method is used to get consolidated custom definitions by passing custom Types from + * each import. 
The resultant collection is then passed back which contains all import + * definitions + * + * @param customType the custom type + * @param namespacePrefix the namespace prefix + */ + @SuppressWarnings("unchecked") + private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { + LinkedHashMap outerCustomTypes; + for (String typeDef : typeDefinitionList) { + if (typeDef.equals("imports")) { + customDefs.put("imports", customType.get(typeDef)); + if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null) { + allCustomDefs.put("imports", customType.get(typeDef)); + } else if (customType.get(typeDef) != null) { + Set allCustomImports = new HashSet<>((ArrayList) allCustomDefs.get("imports")); + allCustomImports.addAll((ArrayList) customType.get(typeDef)); + allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); + } + } else { + outerCustomTypes = (LinkedHashMap) customType.get(typeDef); + if (outerCustomTypes != null) { + if (namespacePrefix != null && !namespacePrefix.isEmpty()) { + LinkedHashMap prefixCustomTypes = new LinkedHashMap(); + for (Map.Entry me : outerCustomTypes.entrySet()) { + String typeDefKey = me.getKey(); + String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey; + prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); + } + customDefs.putAll(prefixCustomTypes); + allCustomDefs.putAll(prefixCustomTypes); + } else { + customDefs.putAll(outerCustomTypes); + allCustomDefs.putAll(outerCustomTypes); + } + } + } + } + } + + private void _updateNestedToscaTpls(String fullFileName, LinkedHashMap customTpl) { + if (fullFileName != null && customTpl != null) { + LinkedHashMap tt = new LinkedHashMap(); + tt.put(fullFileName, customTpl); + nestedToscaTpls.add(tt); + } } - private void _validateImportKeys(String importName, LinkedHashMap importUri) { - if(importUri.get(FILE) == null) { + private void _validateImportKeys(String importName, LinkedHashMap importUri) { + if (importUri.get(FILE) == null) { //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format( - "MissingRequiredFieldError: Import of template \"%s\" is missing field %s",importName,FILE))); - } - for(String key: importUri.keySet()) { - boolean bFound = false; - for(String is: IMPORTS_SECTION) { - if(is.equals(key)) { - bFound = true; - break; - } - } - if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format( + "MissingRequiredFieldError: Import of template \"%s\" is missing field %s", importName, FILE))); + } + for (String key : importUri.keySet()) { + boolean bFound = false; + for (String is : IMPORTS_SECTION) { + if (is.equals(key)) { + bFound = true; + break; + } + } + if (!bFound) { //log.warning(_('Unknown keyname "%(key)s" error in ' // 'imported definition "%(def)s".') // % {'key': key, 'def': import_name}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format( - "UnknownFieldError: Import of template \"%s\" has unknown fiels %s",importName,key))); - } - } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format( + "UnknownFieldError: Import of template \"%s\" has unknown fiels %s", importName, key))); + } + } } @SuppressWarnings("unchecked") - private Object[] _loadImportTemplate(String importName, Object importUriDef) { + private Object[] 
_loadImportTemplate(String importName, Object importUriDef) { /* This method loads the custom type definitions referenced in "imports" section of the TOSCA YAML template by determining whether each import @@ -251,251 +246,237 @@ public class ImportsLoader { | URL | URL | OK | +----------+--------+------------------------------+ */ - Object al[] = new Object[2]; + Object al[] = new Object[2]; boolean shortImportNotation = false; String fileName; String repository; - if(importUriDef instanceof LinkedHashMap) { - _validateImportKeys(importName, (LinkedHashMap)importUriDef); - fileName = (String)((LinkedHashMap)importUriDef).get(FILE); - repository = (String)((LinkedHashMap)importUriDef).get(REPOSITORY); - if(repository != null) { - if(!repositories.keySet().contains(repository)) { + if (importUriDef instanceof LinkedHashMap) { + _validateImportKeys(importName, (LinkedHashMap) importUriDef); + fileName = (String) ((LinkedHashMap) importUriDef).get(FILE); + repository = (String) ((LinkedHashMap) importUriDef).get(REPOSITORY); + if (repository != null) { + if (!repositories.keySet().contains(repository)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE189", String.format( - "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", - repository,repositories.keySet().toString()))); - } + "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", + repository, repositories.keySet().toString()))); + } } - } - else { - fileName = (String)importUriDef; + } else { + fileName = (String) importUriDef; repository = null; shortImportNotation = true; } - if(fileName == null || fileName.isEmpty()) { - //msg = (_('A template file name is not provided with import ' - // 'definition "%(import_name)s".') - // % {'import_name': import_name}) - //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format( - "ValidationError: A template file name is not provided with import definition \"%s\"",importName))); - al[0] = al[1] = null; - return al; + if (fileName == null || fileName.isEmpty()) { + //msg = (_('A template file name is not provided with import ' + // 'definition "%(import_name)s".') + // % {'import_name': import_name}) + //log.error(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format( + "ValidationError: A template file name is not provided with import definition \"%s\"", importName))); + al[0] = al[1] = null; + return al; } - if(UrlUtils.validateUrl(fileName)) { - try (InputStream input = new URL(fileName).openStream();) { - al[0] = fileName; - Yaml yaml = new Yaml(); - al[1] = yaml.load(input); - return al; - } - catch(IOException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format( - "ImportError: \"%s\" loading YAML import from \"%s\"",e.getClass().getSimpleName(),fileName))); - al[0] = al[1] = null; - return al; - } - } - else if(repository == null || repository.isEmpty()) { - boolean aFile = false; + if (UrlUtils.validateUrl(fileName)) { + try (InputStream input = new URL(fileName).openStream();) { + al[0] = fileName; + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } catch (IOException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format( + "ImportError: \"%s\" loading YAML import from \"%s\"", e.getClass().getSimpleName(), fileName))); + al[0] = al[1] = null; + return al; + } + } else 
if (repository == null || repository.isEmpty()) { + boolean aFile = false; String importTemplate = null; - if(path != null && !path.isEmpty()) { - if(UrlUtils.validateUrl(path)) { - File fp = new File(path); - if(fp.isAbsolute()) { - String msg = String.format( - "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", - fileName,path); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg)); + if (path != null && !path.isEmpty()) { + if (UrlUtils.validateUrl(path)) { + File fp = new File(path); + if (fp.isAbsolute()) { + String msg = String.format( + "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", + fileName, path); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg)); al[0] = al[1] = null; return al; - } - importTemplate = UrlUtils.joinUrl(path,fileName); - aFile = false; - } - else { - + } + importTemplate = UrlUtils.joinUrl(path, fileName); + aFile = false; + } else { + aFile = true; - File fp = new File(path); - if(fp.isFile()) { + File fp = new File(path); + if (fp.isFile()) { File fn = new File(fileName); - if(fn.isFile()) { + if (fn.isFile()) { importTemplate = fileName; - } - else { - String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName; - File ffp = new File(fullPath); - if(ffp.isFile()) { + } else { + String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName; + File ffp = new File(fullPath); + if (ffp.isFile()) { importTemplate = fullPath; - } - else { - String dirPath = Paths.get(path).toAbsolutePath().getParent().toString(); - String filePath; - if(Paths.get(fileName).getParent() != null) { - filePath = Paths.get(fileName).getParent().toString(); - } - else { - filePath = ""; - } - if(!filePath.isEmpty() && dirPath.endsWith(filePath)) { - String sFileName = Paths.get(fileName).getFileName().toString(); - importTemplate = dirPath + File.separator + sFileName; - File fit = new File(importTemplate); - if(!fit.isFile()) { + } else { + String dirPath = Paths.get(path).toAbsolutePath().getParent().toString(); + String filePath; + if (Paths.get(fileName).getParent() != null) { + filePath = Paths.get(fileName).getParent().toString(); + } else { + filePath = ""; + } + if (!filePath.isEmpty() && dirPath.endsWith(filePath)) { + String sFileName = Paths.get(fileName).getFileName().toString(); + importTemplate = dirPath + File.separator + sFileName; + File fit = new File(importTemplate); + if (!fit.isFile()) { //msg = (_('"%(import_template)s" is' // 'not a valid file') // % {'import_template': // import_template}) //log.error(msg) - String msg = String.format( - "ValueError: \"%s\" is not a valid file",importTemplate); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg)); + String msg = String.format( + "ValueError: \"%s\" is not a valid file", importTemplate); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg)); log.debug("ImportsLoader - _loadImportTemplate - {}", msg); - } - } + } + } } } - } - } + } + } + } else { // template is pre-parsed + File fn = new File(fileName); + if (fn.isAbsolute() && fn.isFile()) { + aFile = true; + importTemplate = fileName; + } else { + String msg = String.format( + "Relative file name \"%s\" cannot be used in a pre-parsed input template", fileName); + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg)); + al[0] = al[1] = null; + return al; + } } - else { // template is pre-parsed - File fn = new File(fileName); - if(fn.isAbsolute() && fn.isFile()) { - aFile = true; - importTemplate = fileName; - } - else { - String msg = String.format( - "Relative file name \"%s\" cannot be used in a pre-parsed input template",fileName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg)); - al[0] = al[1] = null; - return al; - } - } - - if(importTemplate == null || importTemplate.isEmpty()) { + + if (importTemplate == null || importTemplate.isEmpty()) { //log.error(_('Import "%(name)s" is not valid.') % // {'name': import_uri_def}) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE195", String.format( - "ImportError: Import \"%s\" is not valid",importUriDef))); - al[0] = al[1] = null; - return al; + "ImportError: Import \"%s\" is not valid", importUriDef))); + al[0] = al[1] = null; + return al; } - + // for now, this must be a file - if(!aFile) { - log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); + if (!aFile) { + log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE196", String.format( - "ImportError: Import \"%s\" is not a file",importName))); - al[0] = al[1] = null; - return al; + "ImportError: Import \"%s\" is not a file", importName))); + al[0] = al[1] = null; + return al; } try (BufferedReader br = new BufferedReader(new FileReader(importTemplate));) { - al[0] = importTemplate; + al[0] = importTemplate; - Yaml yaml = new Yaml(); - al[1] = yaml.load(br); - return al; - } - catch(FileNotFoundException e) { + Yaml yaml = new Yaml(); + al[1] = yaml.load(br); + return al; + } catch (FileNotFoundException e) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE197", String.format( - "ImportError: Failed to load YAML from \"%s\"" + e,importName))); - al[0] = al[1] = null; - return al; - } - catch(Exception e) { + "ImportError: Failed to load YAML from \"%s\"" + e, importName))); + al[0] = al[1] = null; + return al; + } catch (Exception e) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE198", String.format( - "ImportError: Exception from SnakeYAML file = \"%s\"" + e,importName))); - al[0] = al[1] = null; - return al; + "ImportError: Exception from SnakeYAML file = \"%s\"" + e, importName))); + al[0] = al[1] = null; + return al; } } - - if(shortImportNotation) { + + if (shortImportNotation) { //log.error(_('Import "%(name)s" is not valid.') % import_uri_def) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE199", String.format( - "ImportError: Import \"%s\" is not valid",importName))); - al[0] = al[1] = null; - return al; + "ImportError: Import \"%s\" is not valid", importName))); + al[0] = al[1] = null; + return al; } - + String fullUrl = ""; - String repoUrl = ""; - if(repository != null && !repository.isEmpty()) { - if(repositories != null) { - for(String repoName: repositories.keySet()) { - if(repoName.equals(repository)) { - Object repoDef = repositories.get(repoName); - if(repoDef instanceof String) { - repoUrl = 
(String)repoDef; - } - else if(repoDef instanceof LinkedHashMap) { - repoUrl = (String)((LinkedHashMap)repoDef).get("url"); - } - // Remove leading, ending spaces and strip - // the last character if "/" - repoUrl = repoUrl.trim(); - if(repoUrl.endsWith("/")) { - repoUrl = repoUrl.substring(0,repoUrl.length()-1); - } - fullUrl = repoUrl + "/" + fileName; - break; - } - } - } - if(fullUrl.isEmpty()) { + String repoUrl = ""; + if (repository != null && !repository.isEmpty()) { + if (repositories != null) { + for (String repoName : repositories.keySet()) { + if (repoName.equals(repository)) { + Object repoDef = repositories.get(repoName); + if (repoDef instanceof String) { + repoUrl = (String) repoDef; + } else if (repoDef instanceof LinkedHashMap) { + repoUrl = (String) ((LinkedHashMap) repoDef).get("url"); + } + // Remove leading, ending spaces and strip + // the last character if "/" + repoUrl = repoUrl.trim(); + if (repoUrl.endsWith("/")) { + repoUrl = repoUrl.substring(0, repoUrl.length() - 1); + } + fullUrl = repoUrl + "/" + fileName; + break; + } + } + } + if (fullUrl.isEmpty()) { String msg = String.format( - "referenced repository \"%s\" in import definition \"%s\" not found", - repository,importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg)); - al[0] = al[1] = null; - return al; - } - } - if(UrlUtils.validateUrl(fullUrl)) { - try (InputStream input = new URL(fullUrl).openStream();) { - al[0] = fullUrl; - Yaml yaml = new Yaml(); - al[1] = yaml.load(input); - return al; - } - catch(IOException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format( - "ImportError: Exception loading YAML import from \"%s\"",fullUrl))); - al[0] = al[1] = null; - return al; - } + "referenced repository \"%s\" in import definition \"%s\" not found", + repository, importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg)); + al[0] = al[1] = null; + return al; + } } - else { + if (UrlUtils.validateUrl(fullUrl)) { + try (InputStream input = new URL(fullUrl).openStream();) { + al[0] = fullUrl; + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } catch (IOException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format( + "ImportError: Exception loading YAML import from \"%s\"", fullUrl))); + al[0] = al[1] = null; + return al; + } + } else { String msg = String.format( - "repository URL \"%s\" in import definition \"%s\" is not valid", - repoUrl,importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg)); + "repository URL \"%s\" in import definition \"%s\" is not valid", + repoUrl, importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg)); } - + // if we got here something is wrong with the flow... 
log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName); ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE203", String.format( - "ImportError: _loadImportTemplate got to dead end (importName %s)\n",importName))); + "ImportError: _loadImportTemplate got to dead end (importName %s)\n", importName))); al[0] = al[1] = null; return al; } - @Override - public String toString() { - return "ImportsLoader{" + - "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) + - ", importslist=" + importslist + - ", path='" + path + '\'' + - ", typeDefinitionList=" + typeDefinitionList + - ", customDefs=" + customDefs + - ", nestedToscaTpls=" + nestedToscaTpls + - ", repositories=" + repositories + - '}'; - } + @Override + public String toString() { + return "ImportsLoader{" + + "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) + + ", importslist=" + importslist + + ", path='" + path + '\'' + + ", typeDefinitionList=" + typeDefinitionList + + ", customDefs=" + customDefs + + ", nestedToscaTpls=" + nestedToscaTpls + + ", repositories=" + repositories + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java index 6a2e9f6..4fabe38 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,532 +20,525 @@ package org.onap.sdc.toscaparser.api; -import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF; - -import com.google.common.collect.Lists; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.InterfacesDef; +import org.onap.sdc.toscaparser.api.elements.Metadata; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.utils.CopyUtils; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import org.onap.sdc.toscaparser.api.elements.*; -import org.onap.sdc.toscaparser.api.utils.CopyUtils; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF; public class NodeTemplate extends EntityTemplate { - - private LinkedHashMap templates; - private LinkedHashMap customDef; - private ArrayList availableRelTpls; - private LinkedHashMap availableRelTypes; - private LinkedHashMap related; - private ArrayList relationshipTpl; - private LinkedHashMap _relationships; - private SubstitutionMappings subMappingToscaTemplate; - private TopologyTemplate originComponentTemplate; - private Metadata metadata; - - private static final String METADATA = "metadata"; - - public NodeTemplate(String name, - LinkedHashMap ntnodeTemplates, - LinkedHashMap ntcustomDef, - ArrayList ntavailableRelTpls, - LinkedHashMap ntavailableRelTypes) { - this( name, ntnodeTemplates, ntcustomDef, ntavailableRelTpls, - ntavailableRelTypes, null); - } - - @SuppressWarnings("unchecked") - public NodeTemplate(String name, - LinkedHashMap ntnodeTemplates, - LinkedHashMap ntcustomDef, - ArrayList ntavailableRelTpls, - LinkedHashMap ntavailableRelTypes, - NodeTemplate parentNodeTemplate) { - - super(name, (LinkedHashMap)ntnodeTemplates.get(name), - "node_type", ntcustomDef, parentNodeTemplate); - - templates = ntnodeTemplates; - _validateFields((LinkedHashMap)templates.get(name)); - customDef = ntcustomDef; - related = new LinkedHashMap(); - relationshipTpl = new ArrayList(); - availableRelTpls = ntavailableRelTpls; - availableRelTypes = ntavailableRelTypes; - _relationships = new LinkedHashMap(); - subMappingToscaTemplate = null; - metadata = _metaData(); - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getRelationships() { - if(_relationships.isEmpty()) { - List requires = getRequirements().getAll(); - if(requires != null && requires instanceof List) { - for(RequirementAssignment r: requires) { - LinkedHashMap explicit = _getExplicitRelationship(r); - if(explicit != null) { - // _relationships.putAll(explicit)... 
- for(Map.Entry ee: explicit.entrySet()) { - _relationships.put(ee.getKey(), ee.getValue()); - } - } - } - } - } - return _relationships; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getExplicitRelationship(RequirementAssignment req) { + + private LinkedHashMap templates; + private LinkedHashMap customDef; + private ArrayList availableRelTpls; + private LinkedHashMap availableRelTypes; + private LinkedHashMap related; + private ArrayList relationshipTpl; + private LinkedHashMap _relationships; + private SubstitutionMappings subMappingToscaTemplate; + private TopologyTemplate originComponentTemplate; + private Metadata metadata; + + private static final String METADATA = "metadata"; + + public NodeTemplate(String name, + LinkedHashMap ntnodeTemplates, + LinkedHashMap ntcustomDef, + ArrayList ntavailableRelTpls, + LinkedHashMap ntavailableRelTypes) { + this(name, ntnodeTemplates, ntcustomDef, ntavailableRelTpls, + ntavailableRelTypes, null); + } + + @SuppressWarnings("unchecked") + public NodeTemplate(String name, + LinkedHashMap ntnodeTemplates, + LinkedHashMap ntcustomDef, + ArrayList ntavailableRelTpls, + LinkedHashMap ntavailableRelTypes, + NodeTemplate parentNodeTemplate) { + + super(name, (LinkedHashMap) ntnodeTemplates.get(name), + "node_type", ntcustomDef, parentNodeTemplate); + + templates = ntnodeTemplates; + _validateFields((LinkedHashMap) templates.get(name)); + customDef = ntcustomDef; + related = new LinkedHashMap(); + relationshipTpl = new ArrayList(); + availableRelTpls = ntavailableRelTpls; + availableRelTypes = ntavailableRelTypes; + _relationships = new LinkedHashMap(); + subMappingToscaTemplate = null; + metadata = _metaData(); + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationships() { + if (_relationships.isEmpty()) { + List requires = getRequirements().getAll(); + if (requires != null && requires instanceof List) { + for (RequirementAssignment r : requires) { + LinkedHashMap explicit = _getExplicitRelationship(r); + if (explicit != null) { + // _relationships.putAll(explicit)... + for (Map.Entry ee : explicit.entrySet()) { + _relationships.put(ee.getKey(), ee.getValue()); + } + } + } + } + } + return _relationships; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getExplicitRelationship(RequirementAssignment req) { // Handle explicit relationship // For example, // - req: // node: DBMS // relationship: tosca.relationships.HostedOn - - LinkedHashMap explicitRelation = new LinkedHashMap(); - String node = req.getNodeTemplateName(); - - if(node != null && !node.isEmpty()) { + + LinkedHashMap explicitRelation = new LinkedHashMap(); + String node = req.getNodeTemplateName(); + + if (node != null && !node.isEmpty()) { //msg = _('Lookup by TOSCA types is not supported. ' // 'Requirement for "%s" can not be full-filled.') % self.name - boolean bFound = false; - for(String k: EntityType.TOSCA_DEF.keySet()) { - if(k.equals(node)) { - bFound = true; - break; - } - } - if(bFound || customDef.get(node) != null) { + boolean bFound = false; + for (String k : EntityType.TOSCA_DEF.keySet()) { + if (k.equals(node)) { + bFound = true; + break; + } + } + if (bFound || customDef.get(node) != null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE205", String.format( - "NotImplementedError: Lookup by TOSCA types is not supported. Requirement for \"%s\" can not be full-filled", - getName()))); + "NotImplementedError: Lookup by TOSCA types is not supported. 
Requirement for \"%s\" can not be full-filled", + getName()))); return null; - } - if(templates.get(node) == null) { + } + if (templates.get(node) == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE206", String.format( - "KeyError: Node template \"%s\" was not found",node))); - return null; - } - NodeTemplate relatedTpl = new NodeTemplate(node,templates,customDef,null,null); - Object relationship = req.getRelationship(); - String relationshipString = null; + "KeyError: Node template \"%s\" was not found", node))); + return null; + } + NodeTemplate relatedTpl = new NodeTemplate(node, templates, customDef, null, null); + Object relationship = req.getRelationship(); + String relationshipString = null; // // here relationship can be a string or a LHM with 'type': - // check if its type has relationship defined - if(relationship == null) { - ArrayList parentReqs = ((NodeType)typeDefinition).getAllRequirements(); - if(parentReqs == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null")); - } - else { + // check if its type has relationship defined + if (relationship == null) { + ArrayList parentReqs = ((NodeType) typeDefinition).getAllRequirements(); + if (parentReqs == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null")); + } else { // for(String key: req.keySet()) { // boolean bFoundRel = false; - for(Object rdo: parentReqs) { - LinkedHashMap reqDict = (LinkedHashMap)rdo; - LinkedHashMap relDict = (LinkedHashMap)reqDict.get(req.getName()); - if(relDict != null) { - relationship = relDict.get("relationship"); - //BUG-python??? need to break twice? + for (Object rdo : parentReqs) { + LinkedHashMap reqDict = (LinkedHashMap) rdo; + LinkedHashMap relDict = (LinkedHashMap) reqDict.get(req.getName()); + if (relDict != null) { + relationship = relDict.get("relationship"); + //BUG-python??? need to break twice? // bFoundRel = true; - break; - } - } + break; + } + } // if(bFoundRel) { // break; // } // } - } - } - - if(relationship != null) { - // here relationship can be a string or a LHM with 'type': - if(relationship instanceof String) { - relationshipString = (String)relationship; - } - else if(relationship instanceof LinkedHashMap) { - relationshipString = (String)((LinkedHashMap)relationship).get("type"); - } - - boolean foundRelationshipTpl = false; - // apply available relationship templates if found - if(availableRelTpls != null) { - for(RelationshipTemplate tpl: availableRelTpls) { - if(tpl.getName().equals(relationshipString)) { - RelationshipType rtype = new RelationshipType(tpl.getType(),null,customDef); - explicitRelation.put(rtype, relatedTpl); - tpl.setTarget(relatedTpl); - tpl.setSource(this); - relationshipTpl.add(tpl); - foundRelationshipTpl = true; - } - } - } - // create relationship template object. 
- String relPrfx = EntityType.RELATIONSHIP_PREFIX; - if(!foundRelationshipTpl) { - if(relationship instanceof LinkedHashMap) { - relationshipString = (String)((LinkedHashMap)relationship).get("type"); - if(relationshipString != null) { - if(availableRelTypes != null && !availableRelTypes.isEmpty() && - availableRelTypes.get(relationshipString) != null) { - ; - } - else if(!(relationshipString).startsWith(relPrfx)) { - relationshipString = relPrfx + relationshipString; - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format( - "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", - relatedTpl.getName()))); - } - } - for(RelationshipType rtype: ((NodeType)typeDefinition).getRelationship().keySet()) { - if(rtype.getType().equals(relationshipString)) { - explicitRelation.put(rtype,relatedTpl); - relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); - } - else if(availableRelTypes != null && !availableRelTypes.isEmpty()) { - LinkedHashMap relTypeDef = (LinkedHashMap)availableRelTypes.get(relationshipString); - if(relTypeDef != null) { - String superType = (String)relTypeDef.get("derived_from"); - if(superType != null) { - if(!superType.startsWith(relPrfx)) { - superType = relPrfx + superType; - } - if(rtype.getType().equals(superType)) { - explicitRelation.put(rtype,relatedTpl); - relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); - } - } - } - } - } - } - } - } - return explicitRelation; - } - - @SuppressWarnings("unchecked") - private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) { - LinkedHashMap req = new LinkedHashMap<>(); - req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); - req.put("type",rtype); - RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source, getParentNodeTemplate()); - relationshipTpl.add(tpl); - } - - public ArrayList getRelationshipTemplate() { - return relationshipTpl; - } - - void _addNext(NodeTemplate nodetpl,RelationshipType relationship) { - related.put(nodetpl,relationship); - } - - public ArrayList getRelatedNodes() { - if(related.isEmpty()) { - for(Map.Entry me: ((NodeType)typeDefinition).getRelationship().entrySet()) { - RelationshipType relation = me.getKey(); - NodeType node = me.getValue(); - for(String tpl: templates.keySet()) { - if(tpl.equals(node.getType())) { - //BUG.. python has - // self.related[NodeTemplate(tpl)] = relation - // but NodeTemplate doesn't have a constructor with just name... - //???? 
- related.put(new NodeTemplate(tpl,null,null,null,null),relation); - } - } - } - } - return new ArrayList(related.keySet()); - } - - public void validate(/*tosca_tpl=none is not used...*/) { + } + } + + if (relationship != null) { + // here relationship can be a string or a LHM with 'type': + if (relationship instanceof String) { + relationshipString = (String) relationship; + } else if (relationship instanceof LinkedHashMap) { + relationshipString = (String) ((LinkedHashMap) relationship).get("type"); + } + + boolean foundRelationshipTpl = false; + // apply available relationship templates if found + if (availableRelTpls != null) { + for (RelationshipTemplate tpl : availableRelTpls) { + if (tpl.getName().equals(relationshipString)) { + RelationshipType rtype = new RelationshipType(tpl.getType(), null, customDef); + explicitRelation.put(rtype, relatedTpl); + tpl.setTarget(relatedTpl); + tpl.setSource(this); + relationshipTpl.add(tpl); + foundRelationshipTpl = true; + } + } + } + // create relationship template object. + String relPrfx = EntityType.RELATIONSHIP_PREFIX; + if (!foundRelationshipTpl) { + if (relationship instanceof LinkedHashMap) { + relationshipString = (String) ((LinkedHashMap) relationship).get("type"); + if (relationshipString != null) { + if (availableRelTypes != null && !availableRelTypes.isEmpty() && + availableRelTypes.get(relationshipString) != null) { + ; + } else if (!(relationshipString).startsWith(relPrfx)) { + relationshipString = relPrfx + relationshipString; + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format( + "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", + relatedTpl.getName()))); + } + } + for (RelationshipType rtype : ((NodeType) typeDefinition).getRelationship().keySet()) { + if (rtype.getType().equals(relationshipString)) { + explicitRelation.put(rtype, relatedTpl); + relatedTpl._addRelationshipTemplate(req, rtype.getType(), this); + } else if (availableRelTypes != null && !availableRelTypes.isEmpty()) { + LinkedHashMap relTypeDef = (LinkedHashMap) availableRelTypes.get(relationshipString); + if (relTypeDef != null) { + String superType = (String) relTypeDef.get("derived_from"); + if (superType != null) { + if (!superType.startsWith(relPrfx)) { + superType = relPrfx + superType; + } + if (rtype.getType().equals(superType)) { + explicitRelation.put(rtype, relatedTpl); + relatedTpl._addRelationshipTemplate(req, rtype.getType(), this); + } + } + } + } + } + } + } + } + return explicitRelation; + } + + @SuppressWarnings("unchecked") + private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) { + LinkedHashMap req = new LinkedHashMap<>(); + req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); + req.put("type", rtype); + RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source, getParentNodeTemplate()); + relationshipTpl.add(tpl); + } + + public ArrayList getRelationshipTemplate() { + return relationshipTpl; + } + + void _addNext(NodeTemplate nodetpl, RelationshipType relationship) { + related.put(nodetpl, relationship); + } + + public ArrayList getRelatedNodes() { + if (related.isEmpty()) { + for (Map.Entry me : ((NodeType) typeDefinition).getRelationship().entrySet()) { + RelationshipType relation = me.getKey(); + NodeType node = me.getValue(); + for (String tpl : templates.keySet()) { + if (tpl.equals(node.getType())) { + 
//BUG.. python has + // self.related[NodeTemplate(tpl)] = relation + // but NodeTemplate doesn't have a constructor with just name... + //???? + related.put(new NodeTemplate(tpl, null, null, null, null), relation); + } + } + } + } + return new ArrayList(related.keySet()); + } + + public void validate(/*tosca_tpl=none is not used...*/) { _validateCapabilities(); _validateRequirements(); - _validateProperties(entityTpl,(NodeType)typeDefinition); + _validateProperties(entityTpl, (NodeType) typeDefinition); _validateInterfaces(); - for(Property prop: getPropertiesObjects()) { - prop.validate(); + for (Property prop : getPropertiesObjects()) { + prop.validate(); } - } + } - public Object getPropertyValueFromTemplatesByName(String propertyName) { - LinkedHashMap nodeObject = (LinkedHashMap) templates.get(name); + public Object getPropertyValueFromTemplatesByName(String propertyName) { + LinkedHashMap nodeObject = (LinkedHashMap) templates.get(name); if (nodeObject != null) { - LinkedHashMap properties = (LinkedHashMap)nodeObject.get(PROPERTIES); + LinkedHashMap properties = (LinkedHashMap) nodeObject.get(PROPERTIES); if (properties != null) { return properties.get(propertyName); } } - return null; - } - - private Metadata _metaData() { - if(entityTpl.get(METADATA) != null) { - return new Metadata((Map)entityTpl.get(METADATA)); - } - else { - return null; - } - } - - @SuppressWarnings("unchecked") - private void _validateRequirements() { - ArrayList typeRequires = ((NodeType)typeDefinition).getAllRequirements(); - ArrayList allowedReqs = new ArrayList<>(); - allowedReqs.add("template"); - if(typeRequires != null) { - for(Object to: typeRequires) { - LinkedHashMap treq = (LinkedHashMap)to; - for(Map.Entry me: treq.entrySet()) { - String key = me.getKey(); - Object value = me.getValue(); - allowedReqs.add(key); - if(value instanceof LinkedHashMap) { - allowedReqs.addAll(((LinkedHashMap)value).keySet()); - } - } - - } - } - - ArrayList requires = (ArrayList)((NodeType)typeDefinition).getValue(REQUIREMENTS, entityTpl, false); - if(requires != null) { - if(!(requires instanceof ArrayList)) { + return null; + } + + private Metadata _metaData() { + if (entityTpl.get(METADATA) != null) { + return new Metadata((Map) entityTpl.get(METADATA)); + } else { + return null; + } + } + + @SuppressWarnings("unchecked") + private void _validateRequirements() { + ArrayList typeRequires = ((NodeType) typeDefinition).getAllRequirements(); + ArrayList allowedReqs = new ArrayList<>(); + allowedReqs.add("template"); + if (typeRequires != null) { + for (Object to : typeRequires) { + LinkedHashMap treq = (LinkedHashMap) to; + for (Map.Entry me : treq.entrySet()) { + String key = me.getKey(); + Object value = me.getValue(); + allowedReqs.add(key); + if (value instanceof LinkedHashMap) { + allowedReqs.addAll(((LinkedHashMap) value).keySet()); + } + } + + } + } + + ArrayList requires = (ArrayList) ((NodeType) typeDefinition).getValue(REQUIREMENTS, entityTpl, false); + if (requires != null) { + if (!(requires instanceof ArrayList)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE209", String.format( - "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"",name))); - } - else { - for(Object ro: requires) { - LinkedHashMap req = (LinkedHashMap)ro; - for(Map.Entry me: req.entrySet()) { - String rl = me.getKey(); - Object vo = me.getValue(); - if(vo instanceof LinkedHashMap) { - LinkedHashMap value = (LinkedHashMap)vo; - _validateRequirementsKeys(value); - 
_validateRequirementsProperties(value); - allowedReqs.add(rl); - } - } - _commonValidateField(req,allowedReqs,"requirements"); + "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"", name))); + } else { + for (Object ro : requires) { + LinkedHashMap req = (LinkedHashMap) ro; + for (Map.Entry me : req.entrySet()) { + String rl = me.getKey(); + Object vo = me.getValue(); + if (vo instanceof LinkedHashMap) { + LinkedHashMap value = (LinkedHashMap) vo; + _validateRequirementsKeys(value); + _validateRequirementsProperties(value); + allowedReqs.add(rl); + } + } + _commonValidateField(req, allowedReqs, "requirements"); } - } - } - } + } + } + } - @SuppressWarnings("unchecked") - private void _validateRequirementsProperties(LinkedHashMap reqs) { + @SuppressWarnings("unchecked") + private void _validateRequirementsProperties(LinkedHashMap reqs) { // TO-DO(anyone): Only occurrences property of the requirements is // validated here. Validation of other requirement properties are being // validated in different files. Better to keep all the requirements // properties validation here. - for(Map.Entry me: reqs.entrySet()) { - if(me.getKey().equals("occurrences")) { - ArrayList val = (ArrayList)me.getValue(); - _validateOccurrences(val); - } - - } - } - - private void _validateOccurrences(ArrayList occurrences) { - DataEntity.validateDatatype("list",occurrences,null,null,null); - for(Object val: occurrences) { - DataEntity.validateDatatype("Integer",val,null,null,null); + for (Map.Entry me : reqs.entrySet()) { + if (me.getKey().equals("occurrences")) { + ArrayList val = (ArrayList) me.getValue(); + _validateOccurrences(val); + } + + } + } + + private void _validateOccurrences(ArrayList occurrences) { + DataEntity.validateDatatype("list", occurrences, null, null, null); + for (Object val : occurrences) { + DataEntity.validateDatatype("Integer", val, null, null, null); } - if(occurrences.size() != 2 || - !(0 <= (int)occurrences.get(0) && (int)occurrences.get(0) <= (int)occurrences.get(1)) || - (int)occurrences.get(1) == 0) { + if (occurrences.size() != 2 || + !(0 <= (int) occurrences.get(0) && (int) occurrences.get(0) <= (int) occurrences.get(1)) || + (int) occurrences.get(1) == 0) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE210", String.format( - "InvalidPropertyValueError: property has invalid value %s",occurrences.toString()))); + "InvalidPropertyValueError: property has invalid value %s", occurrences.toString()))); } - } - - private void _validateRequirementsKeys(LinkedHashMap reqs) { - for(String key: reqs.keySet()) { - boolean bFound = false; - for(int i=0; i< REQUIREMENTS_SECTION.length; i++) { - if(key.equals(REQUIREMENTS_SECTION[i])) { - bFound = true; - break; - } - } - if(!bFound) { + } + + private void _validateRequirementsKeys(LinkedHashMap reqs) { + for (String key : reqs.keySet()) { + boolean bFound = false; + for (int i = 0; i < REQUIREMENTS_SECTION.length; i++) { + if (key.equals(REQUIREMENTS_SECTION[i])) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE211", String.format( - "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"",name,key))); - } - } - } - - @SuppressWarnings("unchecked") - private void _validateInterfaces() { - LinkedHashMap ifaces = (LinkedHashMap) - ((NodeType)typeDefinition).getValue(INTERFACES, entityTpl, false); - if(ifaces != null) { - for(Map.Entry me: ifaces.entrySet()) { - 
String iname = me.getKey(); - LinkedHashMap value = (LinkedHashMap)me.getValue(); - if(iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) { - // maybe we should convert [] to arraylist??? - ArrayList inlo = new ArrayList<>(); - for(int i=0; i ifaces = (LinkedHashMap) + ((NodeType) typeDefinition).getValue(INTERFACES, entityTpl, false); + if (ifaces != null) { + for (Map.Entry me : ifaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap value = (LinkedHashMap) me.getValue(); + if (iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) { + // maybe we should convert [] to arraylist??? + ArrayList inlo = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS.length; i++) { + inlo.add(InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS[i]); + } + _commonValidateField(value, inlo, "interfaces"); + } else if (iname.equals(InterfacesDef.CONFIGURE) || iname.equals(InterfacesDef.CONFIGURE_SHORTNAME)) { + // maybe we should convert [] to arraylist??? + ArrayList irco = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS.length; i++) { + irco.add(InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS[i]); + } + _commonValidateField(value, irco, "interfaces"); + } else if (((NodeType) typeDefinition).getInterfaces().keySet().contains(iname)) { + _commonValidateField(value, _collectCustomIfaceOperations(iname), "interfaces"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE212", String.format( + "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s", name, iname))); + } + } + } + } + + @SuppressWarnings("unchecked") + private ArrayList _collectCustomIfaceOperations(String iname) { + ArrayList allowedOperations = new ArrayList<>(); + LinkedHashMap nodetypeIfaceDef = (LinkedHashMap) ((NodeType) + typeDefinition).getInterfaces().get(iname); + allowedOperations.addAll(nodetypeIfaceDef.keySet()); + String ifaceType = (String) nodetypeIfaceDef.get("type"); + if (ifaceType != null) { + LinkedHashMap ifaceTypeDef = null; + if (((NodeType) typeDefinition).customDef != null) { + ifaceTypeDef = (LinkedHashMap) ((NodeType) typeDefinition).customDef.get(ifaceType); + } + if (ifaceTypeDef == null) { + ifaceTypeDef = (LinkedHashMap) EntityType.TOSCA_DEF.get(ifaceType); + } + allowedOperations.addAll(ifaceTypeDef.keySet()); + } + // maybe we should convert [] to arraylist??? + ArrayList idrw = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) { + idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]); + } + allowedOperations.removeAll(idrw); + return allowedOperations; + } + + /** + * Get all interface details for given node template.
+ * + * @return Map that contains the list of all interfaces and their definitions. + * If none found, an empty map will be returned. + */ + public Map> getAllInterfaceDetailsForNodeType() { + Map> interfaceMap = new LinkedHashMap<>(); + + // Get custom interface details + Map customInterfacesDetails = ((NodeType) typeDefinition).getInterfaces(); + // Get native interface details from tosca definitions + Object nativeInterfaceDetails = TOSCA_DEF.get(InterfacesDef.LIFECYCLE); + Map allInterfaceDetails = new LinkedHashMap<>(); + allInterfaceDetails.putAll(customInterfacesDetails); + if (nativeInterfaceDetails != null) { + allInterfaceDetails.put(InterfacesDef.LIFECYCLE, nativeInterfaceDetails); + } + + // Process all interface details from combined collection and return an interface Map with + // interface names and their definitions + for (Map.Entry me : allInterfaceDetails.entrySet()) { + ArrayList interfaces = new ArrayList<>(); + String interfaceType = me.getKey(); + Map interfaceValue = (Map) me.getValue(); + if (interfaceValue.containsKey("type")) { + interfaceType = (String) interfaceValue.get("type"); + } + + for (Map.Entry ve : interfaceValue.entrySet()) { + // Filter type as this is a reserved key and not an operation + if (!ve.getKey().equals("type")) { + InterfacesDef iface = new InterfacesDef(typeDefinition, interfaceType, this, ve.getKey(), ve.getValue()); + interfaces.add(iface); } - else if(iname.equals(InterfacesDef.CONFIGURE) || iname.equals(InterfacesDef.CONFIGURE_SHORTNAME)) { - // maybe we should convert [] to arraylist??? - ArrayList irco = new ArrayList<>(); - for(int i=0; i nodetemplate) { + for (String ntname : nodetemplate.keySet()) { + boolean bFound = false; + for (int i = 0; i < SECTIONS.length; i++) { + if (ntname.equals(SECTIONS[i])) { + bFound = true; + break; } - else if(((NodeType)typeDefinition).getInterfaces().keySet().contains(iname)) { - _commonValidateField(value,_collectCustomIfaceOperations(iname),"interfaces"); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE212", String.format( - "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s",name,iname))); - } - } - } - } - - @SuppressWarnings("unchecked") - private ArrayList _collectCustomIfaceOperations(String iname) { - ArrayList allowedOperations = new ArrayList<>(); - LinkedHashMap nodetypeIfaceDef = (LinkedHashMap)((NodeType) - typeDefinition).getInterfaces().get(iname); - allowedOperations.addAll(nodetypeIfaceDef.keySet()); - String ifaceType = (String)nodetypeIfaceDef.get("type"); - if(ifaceType != null) { - LinkedHashMap ifaceTypeDef = null; - if(((NodeType)typeDefinition).customDef != null) { - ifaceTypeDef = (LinkedHashMap)((NodeType)typeDefinition).customDef.get(ifaceType); - } - if(ifaceTypeDef == null) { - ifaceTypeDef = (LinkedHashMap)EntityType.TOSCA_DEF.get(ifaceType); - } - allowedOperations.addAll(ifaceTypeDef.keySet()); - } - // maybe we should convert [] to arraylist??? - ArrayList idrw = new ArrayList<>(); - for(int i=0; i - * @return Map that contains the list of all interfaces and their definitions. - * If none found, an empty map will be returned. 
- */ - public Map> getAllInterfaceDetailsForNodeType(){ - Map> interfaceMap = new LinkedHashMap<>(); - - // Get custom interface details - Map customInterfacesDetails = ((NodeType)typeDefinition).getInterfaces(); - // Get native interface details from tosca definitions - Object nativeInterfaceDetails = TOSCA_DEF.get(InterfacesDef.LIFECYCLE); - Map allInterfaceDetails = new LinkedHashMap<>(); - allInterfaceDetails.putAll(customInterfacesDetails); - if (nativeInterfaceDetails != null){ - allInterfaceDetails.put(InterfacesDef.LIFECYCLE, nativeInterfaceDetails); - } - - // Process all interface details from combined collection and return an interface Map with - // interface names and their definitions - for(Map.Entry me: allInterfaceDetails.entrySet()) { - ArrayList interfaces = new ArrayList<>(); - String interfaceType = me.getKey(); - Map interfaceValue = (Map)me.getValue(); - if(interfaceValue.containsKey("type")){ - interfaceType = (String) interfaceValue.get("type"); - } - - for(Map.Entry ve: interfaceValue.entrySet()) { - // Filter type as this is a reserved key and not an operation - if(!ve.getKey().equals("type")){ - InterfacesDef iface = new InterfacesDef(typeDefinition, interfaceType,this, ve.getKey(), ve.getValue()); - interfaces.add(iface); - } - } - interfaceMap.put(interfaceType, interfaces); - } - return interfaceMap; - } - - private void _validateFields(LinkedHashMap nodetemplate) { - for(String ntname: nodetemplate.keySet()) { - boolean bFound = false; - for(int i=0; i< SECTIONS.length; i++) { - if(ntname.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if(!bFound) { - for(int i=0; i< SPECIAL_SECTIONS.length; i++) { - if(ntname.equals(SPECIAL_SECTIONS[i])) { - bFound = true; - break; - } - } - - } - if(!bFound) { + } + if (!bFound) { + for (int i = 0; i < SPECIAL_SECTIONS.length; i++) { + if (ntname.equals(SPECIAL_SECTIONS[i])) { + bFound = true; + break; + } + } + + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE213", String.format( - "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"",name,ntname))); - } - } - } - - // getter/setter - - // multilevel nesting - public SubstitutionMappings getSubMappingToscaTemplate() { - return subMappingToscaTemplate; - } - - public void setSubMappingToscaTemplate(SubstitutionMappings sm) { - subMappingToscaTemplate = sm; - } - - public Metadata getMetaData() { - return metadata; - } - - public void setMetaData(Metadata metadata) { - this.metadata = metadata; - } - - @Override - public String toString() { - return getName(); - } - - public TopologyTemplate getOriginComponentTemplate() { - return originComponentTemplate; - } - - public void setOriginComponentTemplate(TopologyTemplate originComponentTemplate) { - this.originComponentTemplate = originComponentTemplate; - } + "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"", name, ntname))); + } + } + } + + // getter/setter + + // multilevel nesting + public SubstitutionMappings getSubMappingToscaTemplate() { + return subMappingToscaTemplate; + } + + public void setSubMappingToscaTemplate(SubstitutionMappings sm) { + subMappingToscaTemplate = sm; + } + + public Metadata getMetaData() { + return metadata; + } + + public void setMetaData(Metadata metadata) { + this.metadata = metadata; + } + + @Override + public String toString() { + return getName(); + } + + public TopologyTemplate getOriginComponentTemplate() { + return originComponentTemplate; + } + + public void 
setOriginComponentTemplate(TopologyTemplate originComponentTemplate) { + this.originComponentTemplate = originComponentTemplate; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java index 392a528..ca8ac55 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,138 +31,138 @@ import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.onap.sdc.toscaparser.api.utils.ValidateUtils; public class Policy extends EntityTemplate { - - - static final String TYPE = "type"; - static final String METADATA = "metadata"; - static final String DESCRIPTION = "description"; - static final String PROPERTIES = "properties"; - static final String TARGETS = "targets"; - private static final String TRIGGERS = "triggers"; - private static final String SECTIONS[] = { - TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; - - Metadata metaDataObject; - LinkedHashMap metaData = null; - ArrayList targetsList; // *** a list of NodeTemplate OR a list of Group *** - String targetsType; - ArrayList triggers; - LinkedHashMap properties; - - public Policy(String _name, - LinkedHashMap _policy, - ArrayList targetObjects, - String _targetsType, - LinkedHashMap _customDef) { - this(_name, _policy, targetObjects, _targetsType, _customDef, null); - } - - public Policy(String _name, - LinkedHashMap _policy, + + + static final String TYPE = "type"; + static final String METADATA = "metadata"; + static final String DESCRIPTION = "description"; + static final String PROPERTIES = "properties"; + static final String TARGETS = "targets"; + private static final String TRIGGERS = "triggers"; + private static final String SECTIONS[] = { + TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; + + Metadata metaDataObject; + LinkedHashMap metaData = null; + ArrayList targetsList; // *** a list of NodeTemplate OR a list of Group *** + String targetsType; + ArrayList triggers; + LinkedHashMap properties; + + public Policy(String _name, + LinkedHashMap _policy, + ArrayList targetObjects, + String _targetsType, + LinkedHashMap _customDef) { + this(_name, _policy, targetObjects, _targetsType, _customDef, null); + } + + public Policy(String _name, + LinkedHashMap _policy, // ArrayList targetObjects, - ArrayList targetObjects, - String _targetsType, - LinkedHashMap _customDef, NodeTemplate parentNodeTemplate) { - super(_name,_policy,"policy_type",_customDef, parentNodeTemplate); - - if(_policy.get(METADATA) != null) { - metaData = (LinkedHashMap)_policy.get(METADATA); - ValidateUtils.validateMap(metaData); - metaDataObject = new Metadata(metaData); + ArrayList targetObjects, + String _targetsType, + LinkedHashMap _customDef, NodeTemplate parentNodeTemplate) { + super(_name, _policy, "policy_type", _customDef, parentNodeTemplate); + + if (_policy.get(METADATA) != null) { + metaData = (LinkedHashMap) _policy.get(METADATA); + ValidateUtils.validateMap(metaData); + metaDataObject = new Metadata(metaData); } 
targetsList = targetObjects; targetsType = _targetsType; - triggers = _triggers((LinkedHashMap)_policy.get(TRIGGERS)); + triggers = _triggers((LinkedHashMap) _policy.get(TRIGGERS)); properties = null; - if(_policy.get("properties") != null) { - properties = (LinkedHashMap)_policy.get("properties"); + if (_policy.get("properties") != null) { + properties = (LinkedHashMap) _policy.get("properties"); } _validateKeys(); - } - - public ArrayList getTargets() { - return (ArrayList)entityTpl.get("targets"); - } - - public ArrayList getDescription() { - return (ArrayList)entityTpl.get("description"); - } - - public ArrayList getmetadata() { - return (ArrayList)entityTpl.get("metadata"); - } - - public String getTargetsType() { - return targetsType; - } - - public Metadata getMetaDataObj() { - return metaDataObject; - } - - public LinkedHashMap getMetaData() { - return metaData; - } - - // public ArrayList getTargetsList() { - public ArrayList getTargetsList() { - return targetsList; - } - - // entityTemplate already has a different getProperties... - // this is to access the local properties variable - public LinkedHashMap getPolicyProperties() { - return properties; - } - - private ArrayList _triggers(LinkedHashMap triggers) { - ArrayList triggerObjs = new ArrayList<>(); - if(triggers != null) { - for(Map.Entry me: triggers.entrySet()) { - String tname = me.getKey(); - LinkedHashMap ttriggerTpl = - (LinkedHashMap)me.getValue(); - Triggers triggersObj = new Triggers(tname,ttriggerTpl); + } + + public ArrayList getTargets() { + return (ArrayList) entityTpl.get("targets"); + } + + public ArrayList getDescription() { + return (ArrayList) entityTpl.get("description"); + } + + public ArrayList getmetadata() { + return (ArrayList) entityTpl.get("metadata"); + } + + public String getTargetsType() { + return targetsType; + } + + public Metadata getMetaDataObj() { + return metaDataObject; + } + + public LinkedHashMap getMetaData() { + return metaData; + } + + // public ArrayList getTargetsList() { + public ArrayList getTargetsList() { + return targetsList; + } + + // entityTemplate already has a different getProperties... 
+ // this is to access the local properties variable + public LinkedHashMap getPolicyProperties() { + return properties; + } + + private ArrayList _triggers(LinkedHashMap triggers) { + ArrayList triggerObjs = new ArrayList<>(); + if (triggers != null) { + for (Map.Entry me : triggers.entrySet()) { + String tname = me.getKey(); + LinkedHashMap ttriggerTpl = + (LinkedHashMap) me.getValue(); + Triggers triggersObj = new Triggers(tname, ttriggerTpl); triggerObjs.add(triggersObj); - } - } - return triggerObjs; - } - - private void _validateKeys() { - for(String key: entityTpl.keySet()) { - boolean bFound = false; - for(int i=0; i customDef; - - public Property(Map.Entry propertyEntry){ + private static final Logger LOGGER = LoggerFactory.getLogger(Property.class.getName()); + + private static final String TYPE = "type"; + private static final String REQUIRED = "required"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static String entrySchema = "entry_schema"; + private static String dataType = "datatypes"; + + private static final String[] PROPERTY_KEYS = { + TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS}; + + private static final String ENTRYTYPE = "type"; + private static final String ENTRYPROPERTIES = "properties"; + private static final String PATH_DELIMITER = "#"; + private static final String[] ENTRY_SCHEMA_KEYS = { + ENTRYTYPE, ENTRYPROPERTIES}; + + private String name; + private Object value; + private Schema schema; + private LinkedHashMap customDef; + + public Property(Map.Entry propertyEntry) { name = propertyEntry.getKey(); value = propertyEntry.getValue(); - } - public Property(String propname, - Object propvalue, - LinkedHashMap propschemaDict, - LinkedHashMap propcustomDef) { - + } + + public Property(String propname, + Object propvalue, + LinkedHashMap propschemaDict, + LinkedHashMap propcustomDef) { + name = propname; value = propvalue; customDef = propcustomDef; schema = new Schema(propname, propschemaDict); - } - - public String getType() { - return schema.getType(); - } - - public boolean isRequired() { - return schema.isRequired(); - } - - public String getDescription() { - return schema.getDescription(); - } - - public Object getDefault() { - return schema.getDefault(); - } - - public ArrayList getConstraints() { - return schema.getConstraints(); - } - - public LinkedHashMap getEntrySchema() { - return schema.getEntrySchema(); - } - - - public String getName() { - return name; - } - - public Object getValue() { - return value; - } - - // setter - public Object setValue(Object vob) { - value = vob; - return value; - } - - public void validate() { - // Validate if not a reference property - if(!Function.isFunction(value)) { - if(getType().equals(Schema.STRING)) { - value = value.toString(); - } - value = DataEntity.validateDatatype(getType(),value, - getEntrySchema(), - customDef, - name); - _validateConstraints(); - } - } - - private void _validateConstraints() { - if(getConstraints() != null) { - for(Constraint constraint: getConstraints()) { - constraint.validate(value); - } - } - } - - @Override - public String toString() { - return "Property{" + - "name='" + name + '\'' + - ", value=" + value + - ", schema=" + schema + - ", customDef=" + customDef + - '}'; - } + } + + public String getType() { + return schema.getType(); + } + + public boolean isRequired() { + return schema.isRequired(); + } + + public String getDescription() { + 
return schema.getDescription(); + } + + public Object getDefault() { + return schema.getDefault(); + } + + public ArrayList getConstraints() { + return schema.getConstraints(); + } + + public LinkedHashMap getEntrySchema() { + return schema.getEntrySchema(); + } + + + public String getName() { + return name; + } + + public Object getValue() { + return value; + } + + // setter + public Object setValue(Object vob) { + value = vob; + return value; + } + + public void validate() { + // Validate if not a reference property + if (!Function.isFunction(value)) { + if (getType().equals(Schema.STRING)) { + value = value.toString(); + } + value = DataEntity.validateDatatype(getType(), value, + getEntrySchema(), + customDef, + name); + validateConstraints(); + } + } + + private void validateConstraints() { + if (getConstraints() != null) { + for (Constraint constraint : getConstraints()) { + constraint.validate(value); + } + } + } + + @Override + public String toString() { + return "Property{" + + "name='" + name + '\'' + + ", value=" + value + + ", schema=" + schema + + ", customDef=" + customDef + + '}'; + } /** * Retrieves property value as list of strings if
* - the value is simple
*    - the value is a list of simple values
* - the provided path refers to a simple property inside a data type
- * @param propertyPath valid name of property for search.
- * If a name refers to a simple field inside a datatype, the property name should be defined with # delimiter.
* + * @param propertyPath valid property name to search for.
+ * If a name refers to a simple field inside a datatype, the property name should be defined with the # delimiter.
* @return List of property values. If not found, an empty list will be returned.
* If property value is a list either of simple fields or of simple fields inside a datatype, all values from the list should be returned */ public List getLeafPropertyValue(String propertyPath) { List propertyValueList = Collections.emptyList(); - if (logger.isDebugEnabled()) { - logger.debug("getLeafPropertyValue=> A new request: propertyPath: {}, value: {}", propertyPath, getValue()); - } - if (propertyPath == null || getValue() == null || - //if entry_schema disappears, it is datatype, - // otherwise it is map of simple types - should be ignored - isValueMapOfSimpleTypes()) { - logger.error("It is a wrong request - ignoring! propertyPath: {}, value: {}", propertyPath, getValue()); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("getLeafPropertyValue=> A new request: propertyPath: {}, value: {}", propertyPath, getValue()); + } + if (propertyPath == null || getValue() == null + //if entry_schema disappears, it is datatype, + // otherwise it is map of simple types - should be ignored + || isValueMapOfSimpleTypes()) { + LOGGER.error("It is a wrong request - ignoring! propertyPath: {}, value: {}", propertyPath, getValue()); return propertyValueList; } String[] path = propertyPath.split(PATH_DELIMITER); @@ -176,16 +177,15 @@ public class Property { if (Schema.isRequestedTypeSimple(getPropertyTypeByPath(path))) { //the internal property type in the path is either simple or list of simple types if (isValueInsideDataType()) { - if (logger.isDebugEnabled()) { - logger.debug("The requested is an internal simple property inside of a data type"); - } + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("The requested is an internal simple property inside of a data type"); + } //requested value is an internal simple property inside of a data type propertyValueList = getSimplePropertyValueForComplexType(path); - } - else { - if (logger.isDebugEnabled()) { - logger.debug("The requested property has simple type or list of simple types"); - } + } else { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("The requested property has simple type or list of simple types"); + } //the requested property is simple type or list of simple types propertyValueList = getSimplePropertyValueForSimpleType(); } @@ -194,44 +194,43 @@ public class Property { } private boolean isValueMapOfSimpleTypes() { - if (getValue() instanceof Map && getEntrySchema() != null) { - logger.warn("This property value is a map of simple types"); - return true; - } - return false; - } + if (getValue() instanceof Map && getEntrySchema() != null) { + LOGGER.warn("This property value is a map of simple types"); + return true; + } + return false; + } private boolean isValueInsideDataType() { //value is either a list of values for data type //or data type return (Schema.LIST.equals(getType()) && isDataTypeInEntrySchema()) - || (getEntrySchema() == null && getType().contains(DATA_TYPE)); + || (getEntrySchema() == null && getType().contains(dataType)); } private Object getSimpleValueFromComplexObject(Object current, String[] path) { - if (current == null) { - return null; - } - int index = 0; - - if (path.length > index) { - for (int i = index; i < path.length; i++) { - if (current instanceof Map) { - current = ((Map) current).get(path[i]); - } else if (current instanceof List) { - current = ((List) current).get(0); - i--; - } - else { - return null; - } - } - } - if (current != null) { - return current; - } - return null; - } + if (current == null) { + return null; + } + int index = 0; + + if (path.length > index) { + for (int i = index; i < path.length; 
i++) { + if (current instanceof Map) { + current = ((Map) current).get(path[i]); + } else if (current instanceof List) { + current = ((List) current).get(0); + i--; + } else { + return null; + } + } + } + if (current != null) { + return current; + } + return null; + } private List getSimplePropertyValueForSimpleType() { if (getValue() instanceof List || getValue() instanceof Map) { @@ -240,32 +239,32 @@ public class Property { return Lists.newArrayList(String.valueOf(value)); } - private List getSimplePropertyValueForComplexType(String[] path) { - if (getValue() instanceof List ) { - return ((List) getValue()).stream() - .map(v -> { - if (path != null) { - return getSimpleValueFromComplexObject(v, path); - } else { - return v; - } - }) - //it might be null when get_input can't be resolved - // e.g.: - // - get_input has two parameters: 1. list and 2. index in this list - //and list has no value - // - neither value no default is defined for get_input - .filter(Objects::nonNull) - .map(String::valueOf) - .collect(Collectors.toList()); - } - //it is data type - List valueList = Lists.newArrayList(); - String valueString = String.valueOf(getSimpleValueFromComplexObject(getValue(), path)); - if (Objects.nonNull(valueString)) { - valueList.add(valueString); - } - return valueList; + private List getSimplePropertyValueForComplexType(String[] path) { + if (getValue() instanceof List) { + return ((List) getValue()).stream() + .map(v -> { + if (path != null) { + return getSimpleValueFromComplexObject(v, path); + } else { + return v; + } + }) + //it might be null when get_input can't be resolved + // e.g.: + // - get_input has two parameters: 1. list and 2. index in this list + //and list has no value + // - neither value no default is defined for get_input + .filter(Objects::nonNull) + .map(String::valueOf) + .collect(Collectors.toList()); + } + //it is data type + List valueList = Lists.newArrayList(); + String valueString = String.valueOf(getSimpleValueFromComplexObject(getValue(), path)); + if (Objects.nonNull(valueString)) { + valueList.add(valueString); + } + return valueList; } private String getPropertyTypeByPath(String[] path) { @@ -281,7 +280,7 @@ public class Property { String propertyType = getType(); if (Schema.LIST.equals(propertyType)) { //if it is list, return entry schema type - return (String)getEntrySchema().get(ENTRYTYPE); + return (String) getEntrySchema().get(ENTRYTYPE); } return propertyType; } @@ -297,7 +296,7 @@ public class Property { private String getInternalPropertyType(String dataTypeName, String[] path, int index) { if (path.length > index) { - LinkedHashMap complexProperty = (LinkedHashMap)customDef.get(dataTypeName); + LinkedHashMap complexProperty = (LinkedHashMap) customDef.get(dataTypeName); if (complexProperty != null) { LinkedHashMap dataTypeProperties = (LinkedHashMap) complexProperty.get(ENTRYPROPERTIES); return getPropertyTypeFromCustomDefDeeply(path, index, dataTypeProperties); @@ -308,7 +307,7 @@ public class Property { } private String getEntrySchemaType(LinkedHashMap property) { - LinkedHashMap entrySchema = (LinkedHashMap)property.get(ENTRY_SCHEMA); + LinkedHashMap entrySchema = (LinkedHashMap) property.get(Property.entrySchema); if (entrySchema != null) { return (String) entrySchema.get(TYPE); } @@ -320,7 +319,7 @@ public class Property { LinkedHashMap foundProperty = (LinkedHashMap) (properties).get(path[index]); if (foundProperty != null) { String propertyType = calculatePropertyType(foundProperty); - if (propertyType == null || index == path.length - 
1){ + if (propertyType == null || index == path.length - 1) { return propertyType; } return getInternalPropertyType(propertyType, path, index + 1); @@ -330,8 +329,8 @@ public class Property { } private boolean isDataTypeInEntrySchema() { - String entrySchemaType = (String)getEntrySchema().get(ENTRYTYPE); - return entrySchemaType != null && entrySchemaType.contains(DATA_TYPE); + String entrySchemaType = (String) getEntrySchema().get(ENTRYTYPE); + return entrySchemaType != null && entrySchemaType.contains(dataType); } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java index 1b5d58a..d1a1383 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,134 +29,134 @@ import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; import org.onap.sdc.toscaparser.api.elements.EntityType; public class RelationshipTemplate extends EntityTemplate { - - private static final String DERIVED_FROM = "derived_from"; - private static final String PROPERTIES = "properties"; - private static final String REQUIREMENTS = "requirements"; - private static final String INTERFACES = "interfaces"; - private static final String CAPABILITIES = "capabilities"; - private static final String TYPE = "type"; - @SuppressWarnings("unused") - private static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE}; - - private String name; - private NodeTemplate target; - private NodeTemplate source; - private ArrayList _properties; - - public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, - String rtname, - LinkedHashMap rtcustomDef, - NodeTemplate rttarget, - NodeTemplate rtsource) { - this(rtrelationshipTemplate, rtname, rtcustomDef, rttarget, rtsource, null); - } - - public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, - String rtname, - LinkedHashMap rtcustomDef, - NodeTemplate rttarget, - NodeTemplate rtsource, NodeTemplate parentNodeTemplate) { - super(rtname,rtrelationshipTemplate,"relationship_type",rtcustomDef, parentNodeTemplate); - - name = rtname; - target = rttarget; - source = rtsource; - _properties = null; - } - - public ArrayList getPropertiesObjects() { - // Return properties objects for this template - if(_properties == null) { + + private static final String DERIVED_FROM = "derived_from"; + private static final String PROPERTIES = "properties"; + private static final String REQUIREMENTS = "requirements"; + private static final String INTERFACES = "interfaces"; + private static final String CAPABILITIES = "capabilities"; + private static final String TYPE = "type"; + @SuppressWarnings("unused") + private static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE}; + + private String name; + private NodeTemplate target; + private NodeTemplate source; + private ArrayList _properties; + + public RelationshipTemplate(LinkedHashMap 
rtrelationshipTemplate, + String rtname, + LinkedHashMap rtcustomDef, + NodeTemplate rttarget, + NodeTemplate rtsource) { + this(rtrelationshipTemplate, rtname, rtcustomDef, rttarget, rtsource, null); + } + + public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, + String rtname, + LinkedHashMap rtcustomDef, + NodeTemplate rttarget, + NodeTemplate rtsource, NodeTemplate parentNodeTemplate) { + super(rtname, rtrelationshipTemplate, "relationship_type", rtcustomDef, parentNodeTemplate); + + name = rtname; + target = rttarget; + source = rtsource; + _properties = null; + } + + public ArrayList getPropertiesObjects() { + // Return properties objects for this template + if (_properties == null) { _properties = _createRelationshipProperties(); - } + } return _properties; - } - - @SuppressWarnings({ "unchecked", "unused" }) - public ArrayList _createRelationshipProperties() { - ArrayList props = new ArrayList (); - LinkedHashMap properties = new LinkedHashMap(); - LinkedHashMap relationship = (LinkedHashMap)entityTpl.get("relationship"); - - if(relationship == null) { - for(Object val: entityTpl.values()) { - if(val instanceof LinkedHashMap) { - relationship = (LinkedHashMap)((LinkedHashMap)val).get("relationship"); - break; - } - } - } - - if(relationship != null) { - properties = (LinkedHashMap)((EntityType)typeDefinition).getValue(PROPERTIES,relationship,false); - } - if(properties == null) { - properties = new LinkedHashMap(); - } - if(properties == null) { - properties = (LinkedHashMap)entityTpl.get(PROPERTIES); - } - if(properties == null) { - properties = new LinkedHashMap(); - } - - if(properties != null) { - for(Map.Entry me: properties.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - LinkedHashMap propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef(); - if(propsDef != null && propsDef.get(pname) != null) { - if(properties.get(pname) != null) { - pvalue = properties.get(name); - } - PropertyDef pd = (PropertyDef)propsDef.get(pname); - Property prop = new Property(pname,pvalue,pd.getSchema(),customDef); - props.add(prop); - } - } - } - ArrayList pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects(); - for(PropertyDef p: pds) { - if(p.getDefault() != null && properties.get(p.getName()) == null) { - Property prop = new Property(p.getName(), (LinkedHashMap)p.getDefault(), p.getSchema(), customDef); + } + + @SuppressWarnings({"unchecked", "unused"}) + public ArrayList _createRelationshipProperties() { + ArrayList props = new ArrayList(); + LinkedHashMap properties = new LinkedHashMap(); + LinkedHashMap relationship = (LinkedHashMap) entityTpl.get("relationship"); + + if (relationship == null) { + for (Object val : entityTpl.values()) { + if (val instanceof LinkedHashMap) { + relationship = (LinkedHashMap) ((LinkedHashMap) val).get("relationship"); + break; + } + } + } + + if (relationship != null) { + properties = (LinkedHashMap) ((EntityType) typeDefinition).getValue(PROPERTIES, relationship, false); + } + if (properties == null) { + properties = new LinkedHashMap(); + } + if (properties == null) { + properties = (LinkedHashMap) entityTpl.get(PROPERTIES); + } + if (properties == null) { + properties = new LinkedHashMap(); + } + + if (properties != null) { + for (Map.Entry me : properties.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + LinkedHashMap propsDef = ((StatefulEntityType) typeDefinition).getPropertiesDef(); + if (propsDef != null && propsDef.get(pname) != null) { + if 
(properties.get(pname) != null) { + pvalue = properties.get(name); + } + PropertyDef pd = (PropertyDef) propsDef.get(pname); + Property prop = new Property(pname, pvalue, pd.getSchema(), customDef); + props.add(prop); + } + } + } + ArrayList pds = ((StatefulEntityType) typeDefinition).getPropertiesDefObjects(); + for (PropertyDef p : pds) { + if (p.getDefault() != null && properties.get(p.getName()) == null) { + Property prop = new Property(p.getName(), (LinkedHashMap) p.getDefault(), p.getSchema(), customDef); props.add(prop); - } - } + } + } return props; - } - + } + public void validate() { - _validateProperties(entityTpl,(StatefulEntityType)typeDefinition); + _validateProperties(entityTpl, (StatefulEntityType) typeDefinition); } - + // getters/setters public NodeTemplate getTarget() { - return target; + return target; } - + public NodeTemplate getSource() { - return source; + return source; } - + public void setSource(NodeTemplate nt) { - source = nt; + source = nt; } - + public void setTarget(NodeTemplate nt) { - target = nt; + target = nt; } - @Override - public String toString() { - return "RelationshipTemplate{" + - "name='" + name + '\'' + - ", target=" + target.getName() + - ", source=" + source.getName() + - ", _properties=" + _properties + - '}'; - } + @Override + public String toString() { + return "RelationshipTemplate{" + + "name='" + name + '\'' + + ", target=" + target.getName() + + ", source=" + source.getName() + + ", _properties=" + _properties + + '}'; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Repository.java b/src/main/java/org/onap/sdc/toscaparser/api/Repository.java index 2fff7f6..ee5e5bc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Repository.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Repository.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -27,69 +27,69 @@ import org.onap.sdc.toscaparser.api.utils.UrlUtils; import java.util.LinkedHashMap; public class Repository { - - private static final String DESCRIPTION = "description"; - private static final String URL = "url"; - private static final String CREDENTIAL = "credential"; - private static final String SECTIONS[] ={DESCRIPTION, URL, CREDENTIAL}; - - private String name; - private Object reposit; - private String url; - - @SuppressWarnings("unchecked") - public Repository(String repName,Object repValue) { - name = repName; - reposit = repValue; - if(reposit instanceof LinkedHashMap) { - url = (String)((LinkedHashMap)reposit).get("url"); - if(url == null) { + + private static final String DESCRIPTION = "description"; + private static final String URL = "url"; + private static final String CREDENTIAL = "credential"; + private static final String SECTIONS[] = {DESCRIPTION, URL, CREDENTIAL}; + + private String name; + private Object reposit; + private String url; + + @SuppressWarnings("unchecked") + public Repository(String repName, Object repValue) { + name = repName; + reposit = repValue; + if (reposit instanceof LinkedHashMap) { + url = (String) ((LinkedHashMap) reposit).get("url"); + if (url == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE229", String.format( - "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"", - name))); + "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"", + name))); } - } - loadAndValidate(name,reposit); - } - - @SuppressWarnings("unchecked") - private void loadAndValidate(String val,Object repositDef) { - String keyname = val; - if(repositDef instanceof LinkedHashMap) { - for(String key: ((LinkedHashMap)reposit).keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(key.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { + } + loadAndValidate(name, reposit); + } + + @SuppressWarnings("unchecked") + private void loadAndValidate(String val, Object repositDef) { + String keyname = val; + if (repositDef instanceof LinkedHashMap) { + for (String key : ((LinkedHashMap) reposit).keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (key.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE230", String.format( - "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"", - keyname,key))); - } - } - - String repositUrl = (String)((LinkedHashMap)repositDef).get("url"); - if(repositUrl != null) { - boolean urlVal = UrlUtils.validateUrl(repositUrl); - if(!urlVal) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE231", String.format( - "URLException: repsositories \"%s\" Invalid Url",keyname))); - } - } - } - } - - @Override - public String toString() { - return "Repository{" + - "name='" + name + '\'' + - ", reposit=" + reposit + - ", url='" + url + '\'' + - '}'; - } + "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"", + keyname, key))); + } + } + + String repositUrl = (String) ((LinkedHashMap) repositDef).get("url"); + if (repositUrl != null) { + boolean urlVal = UrlUtils.validateUrl(repositUrl); + if (!urlVal) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE231", String.format( + "URLException: repsositories \"%s\" Invalid Url", keyname))); + } + } + } + } + + @Override + public String 
toString() { + return "Repository{" + + "name='" + name + '\'' + + ", reposit=" + reposit + + ", url='" + url + '\'' + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java index f980e0c..227b2a9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,7 +20,6 @@ package org.onap.sdc.toscaparser.api; -import java.util.Map; public class RequirementAssignment { @@ -49,6 +48,7 @@ public class RequirementAssignment { /** * Get the name for requirement assignment. + * * @return the name for requirement assignment. */ public String getName() { @@ -57,6 +57,7 @@ public class RequirementAssignment { /** * Set the name for requirement + * * @param name - the name for requirement to set */ public void setName(String name) { @@ -65,6 +66,7 @@ public class RequirementAssignment { /** * Get the node name for requirement assignment. + * * @return the node name for requirement */ public String getNodeTemplateName() { @@ -73,6 +75,7 @@ public class RequirementAssignment { /** * Set the node name for requirement + * * @param nodeName - the node name for requirement to set */ public void setNodeTemplateName(String nodeName) { @@ -81,6 +84,7 @@ public class RequirementAssignment { /** * Get the capability name for requirement assignment. + * * @return the capability name for requirement */ public String getCapabilityName() { @@ -89,6 +93,7 @@ public class RequirementAssignment { /** * Set the capability name for requirement assignment. + * * @param capabilityName - the capability name for requirement to set */ public void setCapabilityName(String capabilityName) { @@ -97,6 +102,7 @@ public class RequirementAssignment { /** * Get the relationship object for requirement + * * @return the relationship object for requirement */ public Object getRelationship() { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java index 1425f6c..2ba6230 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -35,6 +35,7 @@ public class RequirementAssignments { /** * Get all requirement assignments for Node Template.
* This object can be either the original one, holding all requirement assignments for this node template, or a filtered one, holding a filtered subset.
+ * * @return list of requirement assignments for the node template.
* If there are no requirement assignments, empty list is returned. */ @@ -44,6 +45,7 @@ public class RequirementAssignments { /** * Filter requirement assignments by requirement name. + * * @param reqName - The name of requirement * @return RequirementAssignments object, containing requirement assignments of this type.
* If no such found, filtering will result in an empty collection. diff --git a/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java b/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java index 1dec80a..a622a9a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -39,55 +39,55 @@ public class SubstitutionMappings { // SubstitutionMappings exports the topology template as an // implementation of a Node type. - private static final String NODE_TYPE = "node_type"; - private static final String REQUIREMENTS = "requirements"; - private static final String CAPABILITIES = "capabilities"; - - private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES}; - - private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"}; - - private LinkedHashMap subMappingDef; - private ArrayList nodetemplates; - private ArrayList inputs; - private ArrayList outputs; - private ArrayList groups; - private NodeTemplate subMappedNodeTemplate; - private LinkedHashMap customDefs; - private LinkedHashMap _capabilities; - private LinkedHashMap _requirements; - - public SubstitutionMappings(LinkedHashMap smsubMappingDef, - ArrayList smnodetemplates, - ArrayList sminputs, - ArrayList smoutputs, - ArrayList smgroups, - NodeTemplate smsubMappedNodeTemplate, - LinkedHashMap smcustomDefs) { - + private static final String NODE_TYPE = "node_type"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + + private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES}; + + private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"}; + + private LinkedHashMap subMappingDef; + private ArrayList nodetemplates; + private ArrayList inputs; + private ArrayList outputs; + private ArrayList groups; + private NodeTemplate subMappedNodeTemplate; + private LinkedHashMap customDefs; + private LinkedHashMap _capabilities; + private LinkedHashMap _requirements; + + public SubstitutionMappings(LinkedHashMap smsubMappingDef, + ArrayList smnodetemplates, + ArrayList sminputs, + ArrayList smoutputs, + ArrayList smgroups, + NodeTemplate smsubMappedNodeTemplate, + LinkedHashMap smcustomDefs) { + subMappingDef = smsubMappingDef; nodetemplates = smnodetemplates; inputs = sminputs != null ? sminputs : new ArrayList(); outputs = smoutputs != null ? smoutputs : new ArrayList(); groups = smgroups != null ? smgroups : new ArrayList(); subMappedNodeTemplate = smsubMappedNodeTemplate; - customDefs = smcustomDefs != null ? smcustomDefs : new LinkedHashMap(); + customDefs = smcustomDefs != null ? 
smcustomDefs : new LinkedHashMap(); _validate(); _capabilities = null; _requirements = null; - } - - public String getType() { - if(subMappingDef != null) { - return (String)subMappingDef.get(NODE_TYPE); - } - return null; - } - - public ArrayList getNodeTemplates() { - return nodetemplates; - } + } + + public String getType() { + if (subMappingDef != null) { + return (String) subMappingDef.get(NODE_TYPE); + } + return null; + } + + public ArrayList getNodeTemplates() { + return nodetemplates; + } /* @classmethod @@ -95,39 +95,39 @@ public class SubstitutionMappings { if isinstance(sub_mapping_def, dict): return sub_mapping_def.get(cls.NODE_TYPE) */ - - public static String stGetNodeType(LinkedHashMap _subMappingDef) { - if(_subMappingDef instanceof LinkedHashMap) { - return (String)_subMappingDef.get(NODE_TYPE); - } - return null; - } - - public String getNodeType() { - return (String)subMappingDef.get(NODE_TYPE); - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getGroups() { - return groups; - } - - public LinkedHashMap getCapabilities() { - return (LinkedHashMap)subMappingDef.get(CAPABILITIES); - } - - public LinkedHashMap getRequirements() { - return (LinkedHashMap)subMappingDef.get(REQUIREMENTS); - } - - public NodeType getNodeDefinition() { - return new NodeType(getNodeType(), customDefs); - } - - private void _validate() { + + public static String stGetNodeType(LinkedHashMap _subMappingDef) { + if (_subMappingDef instanceof LinkedHashMap) { + return (String) _subMappingDef.get(NODE_TYPE); + } + return null; + } + + public String getNodeType() { + return (String) subMappingDef.get(NODE_TYPE); + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getGroups() { + return groups; + } + + public LinkedHashMap getCapabilities() { + return (LinkedHashMap) subMappingDef.get(CAPABILITIES); + } + + public LinkedHashMap getRequirements() { + return (LinkedHashMap) subMappingDef.get(REQUIREMENTS); + } + + public NodeType getNodeDefinition() { + return new NodeType(getNodeType(), customDefs); + } + + private void _validate() { // Basic validation _validateKeys(); _validateType(); @@ -137,149 +137,149 @@ public class SubstitutionMappings { _validateCapabilities(); _validateRequirements(); _validateOutputs(); - } - - private void _validateKeys() { - // validate the keys of substitution mappings - for(String key: subMappingDef.keySet()) { - boolean bFound = false; - for(String s: SECTIONS) { - if(s.equals(key)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format( - "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"", - key))); - } - } - } - - private void _validateType() { + } + + private void _validateKeys() { + // validate the keys of substitution mappings + for (String key : subMappingDef.keySet()) { + boolean bFound = false; + for (String s : SECTIONS) { + if (s.equals(key)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format( + "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"", + key))); + } + } + } + + private void _validateType() { // validate the node_type of substitution mappings - String nodeType = (String)subMappingDef.get(NODE_TYPE); - if(nodeType == null) { + String nodeType = (String) subMappingDef.get(NODE_TYPE); + if (nodeType == null) { 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE233", String.format( - "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"", - NODE_TYPE))); + "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"", + NODE_TYPE))); } Object nodeTypeDef = customDefs.get(nodeType); - if(nodeTypeDef == null) { + if (nodeTypeDef == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE234", String.format( - "InvalidNodeTypeError: \"%s\" is invalid",nodeType))); + "InvalidNodeTypeError: \"%s\" is invalid", nodeType))); } - } + } - private void _validateInputs() { + private void _validateInputs() { // validate the inputs of substitution mappings. // The inputs defined by the topology template have to match the // properties of the node type or the substituted node. If there are // more inputs than the substituted node has properties, default values //must be defined for those inputs. - - HashSet allInputs = new HashSet<>(); - for(Input inp: inputs) { - allInputs.add(inp.getName()); - } - HashSet requiredProperties = new HashSet<>(); - for(PropertyDef pd: getNodeDefinition().getPropertiesDefObjects()) { - if(pd.isRequired() && pd.getDefault() == null) { - requiredProperties.add(pd.getName()); - } - } + + HashSet allInputs = new HashSet<>(); + for (Input inp : inputs) { + allInputs.add(inp.getName()); + } + HashSet requiredProperties = new HashSet<>(); + for (PropertyDef pd : getNodeDefinition().getPropertiesDefObjects()) { + if (pd.isRequired() && pd.getDefault() == null) { + requiredProperties.add(pd.getName()); + } + } // Must provide inputs for required properties of node type. - for(String property: requiredProperties) { + for (String property : requiredProperties) { // Check property which is 'required' and has no 'default' value - if(!allInputs.contains(property)) { + if (!allInputs.contains(property)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE235", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(),property))); + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", + getNodeType(), property))); } } // If the optional properties of node type need to be customized by // substituted node, it also is necessary to define inputs for them, // otherwise they are not mandatory to be defined. - HashSet customizedParameters = new HashSet<>(); - if(subMappedNodeTemplate != null) { - customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet()); - } - HashSet allProperties = new HashSet( - getNodeDefinition().getPropertiesDef().keySet()); - HashSet diffset = customizedParameters; - diffset.removeAll(allInputs); - for(String parameter: diffset) { - if(allProperties.contains(parameter)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(),parameter))); - } - } - // Additional inputs are not in the properties of node type must - // provide default values. Currently the scenario may not happen - // because of parameters validation in nodetemplate, here is a - // guarantee. 
- for(Input inp: inputs) { - diffset = allInputs; - diffset.removeAll(allProperties); - if(diffset.contains(inp.getName()) && inp.getDefault() == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"", - getNodeType(),inp.getName()))); - } - } - } - - private void _validateCapabilities() { + HashSet customizedParameters = new HashSet<>(); + if (subMappedNodeTemplate != null) { + customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet()); + } + HashSet allProperties = new HashSet( + getNodeDefinition().getPropertiesDef().keySet()); + HashSet diffset = customizedParameters; + diffset.removeAll(allInputs); + for (String parameter : diffset) { + if (allProperties.contains(parameter)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", + getNodeType(), parameter))); + } + } + // Additional inputs are not in the properties of node type must + // provide default values. Currently the scenario may not happen + // because of parameters validation in nodetemplate, here is a + // guarantee. + for (Input inp : inputs) { + diffset = allInputs; + diffset.removeAll(allProperties); + if (diffset.contains(inp.getName()) && inp.getDefault() == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"", + getNodeType(), inp.getName()))); + } + } + } + + private void _validateCapabilities() { // validate the capabilities of substitution mappings // The capabilities must be in node template which be mapped. - LinkedHashMap tplsCapabilities = - (LinkedHashMap)subMappingDef.get(CAPABILITIES); - List nodeCapabilities = null; - if(subMappedNodeTemplate != null) { - nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll(); - } - if(nodeCapabilities != null) { - for(CapabilityAssignment cap: nodeCapabilities) { - if(tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { - ; //pass - // ValidationIssueCollector.appendException( - // UnknownFieldError(what='SubstitutionMappings', - // field=cap)) - } - } - } - } - - private void _validateRequirements() { + LinkedHashMap tplsCapabilities = + (LinkedHashMap) subMappingDef.get(CAPABILITIES); + List nodeCapabilities = null; + if (subMappedNodeTemplate != null) { + nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll(); + } + if (nodeCapabilities != null) { + for (CapabilityAssignment cap : nodeCapabilities) { + if (tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { + ; //pass + // ValidationIssueCollector.appendException( + // UnknownFieldError(what='SubstitutionMappings', + // field=cap)) + } + } + } + } + + private void _validateRequirements() { // validate the requirements of substitution mappings - //***************************************************** - //TO-DO - Different from Python code!! one is a bug... - //***************************************************** + //***************************************************** + //TO-DO - Different from Python code!! one is a bug... + //***************************************************** // The requirements must be in node template which be mapped. 
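Both the capability check and the requirement check boil down to the same comparison: names referenced by the mapping versus names declared on the substituted node template. A hedged sketch of that comparison only, since the TO-DO above notes the Java and Python sources disagree on which side should raise the error; the class name below is made up.

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

// Hedged sketch: the consistency check behind _validateCapabilities and
// _validateRequirements, reduced to a plain set difference. Which side is
// authoritative differs between the Java and Python sources, so only the
// comparison itself is shown here.
final class MappingConsistency {

    // Returns every name that appears in 'referenced' but is not declared
    // by the substituted node template.
    static List<String> undeclaredNames(Set<String> referenced, Set<String> declaredOnNode) {
        List<String> unknown = new ArrayList<>();
        for (String name : referenced) {
            if (!declaredOnNode.contains(name)) {
                unknown.add(name);
            }
        }
        return unknown;
    }
}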
- LinkedHashMap tplsRequirements = - (LinkedHashMap)subMappingDef.get(REQUIREMENTS); - List nodeRequirements = null; - if(subMappedNodeTemplate != null) { - nodeRequirements = subMappedNodeTemplate.getRequirements().getAll(); - } - if(nodeRequirements != null) { - for(RequirementAssignment ro: nodeRequirements) { - String cap = ro.getName(); - if(tplsRequirements != null && tplsRequirements.get(cap) == null) { - ; //pass - // ValidationIssueCollector.appendException( - // UnknownFieldError(what='SubstitutionMappings', - // field=cap)) - } - } - } - } - - private void _validateOutputs() { + LinkedHashMap tplsRequirements = + (LinkedHashMap) subMappingDef.get(REQUIREMENTS); + List nodeRequirements = null; + if (subMappedNodeTemplate != null) { + nodeRequirements = subMappedNodeTemplate.getRequirements().getAll(); + } + if (nodeRequirements != null) { + for (RequirementAssignment ro : nodeRequirements) { + String cap = ro.getName(); + if (tplsRequirements != null && tplsRequirements.get(cap) == null) { + ; //pass + // ValidationIssueCollector.appendException( + // UnknownFieldError(what='SubstitutionMappings', + // field=cap)) + } + } + } + } + + private void _validateOutputs() { // validate the outputs of substitution mappings. // The outputs defined by the topology template have to match the @@ -292,46 +292,46 @@ public class SubstitutionMappings { // attributes of the node type according to the specification, but // it's reasonable that there are more inputs than the node type // has properties, the specification will be amended? - - for(Output output: outputs) { - Object ado = getNodeDefinition().getAttributesDef(); - if(ado != null && ((LinkedHashMap)ado).get(output.getName()) == null) { + + for (Output output : outputs) { + Object ado = getNodeDefinition().getAttributesDef(); + if (ado != null && ((LinkedHashMap) ado).get(output.getName()) == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE238", String.format( - "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"", - output.getName(),getNodeType()))); - } + "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"", + output.getName(), getNodeType()))); + } } - } + } - @Override - public String toString() { - return "SubstitutionMappings{" + + @Override + public String toString() { + return "SubstitutionMappings{" + // "subMappingDef=" + subMappingDef + // ", nodetemplates=" + nodetemplates + // ", inputs=" + inputs + // ", outputs=" + outputs + // ", groups=" + groups + - ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) + + ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? 
"" : subMappedNodeTemplate.getName()) + // ", customDefs=" + customDefs + // ", _capabilities=" + _capabilities + // ", _requirements=" + _requirements + - '}'; - } - - @Deprecated - public String toLimitedString() { - return "SubstitutionMappings{" + - "subMappingDef=" + subMappingDef + - ", nodetemplates=" + nodetemplates + - ", inputs=" + inputs + - ", outputs=" + outputs + - ", groups=" + groups + - ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) + - ", customDefs=" + customDefs + - ", _capabilities=" + _capabilities + - ", _requirements=" + _requirements + - '}'; - } + '}'; + } + + @Deprecated + public String toLimitedString() { + return "SubstitutionMappings{" + + "subMappingDef=" + subMappingDef + + ", nodetemplates=" + nodetemplates + + ", inputs=" + inputs + + ", outputs=" + outputs + + ", groups=" + groups + + ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? "" : subMappedNodeTemplate.getName()) + + ", customDefs=" + customDefs + + ", _capabilities=" + _capabilities + + ", _requirements=" + _requirements + + '}'; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java index 4c4afd3..2160527 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -32,528 +32,522 @@ import org.onap.sdc.toscaparser.api.parameters.Output; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; public class TopologyTemplate { - private static final String DESCRIPTION = "description"; - private static final String INPUTS = "inputs"; - private static final String NODE_TEMPLATES = "node_templates"; - private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; - private static final String OUTPUTS = "outputs"; - private static final String GROUPS = "groups"; - private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; - private static final String POLICIES = "policies"; - private static final String METADATA = "metadata"; - - private static String SECTIONS[] = { - DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES, - OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA - }; - - private LinkedHashMap tpl; - LinkedHashMap metaData; + private static final String DESCRIPTION = "description"; + private static final String INPUTS = "inputs"; + private static final String NODE_TEMPLATES = "node_templates"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String OUTPUTS = "outputs"; + private static final String GROUPS = "groups"; + private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; + private static final String POLICIES = "policies"; + private static final String METADATA = "metadata"; + + private static String[] SECTIONS = { + DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES, + OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA + }; + + private LinkedHashMap tpl; + LinkedHashMap metaData; private ArrayList inputs; private ArrayList outputs; private ArrayList relationshipTemplates; private ArrayList nodeTemplates; - private LinkedHashMap customDefs; - private LinkedHashMap relTypes;//TYPE + private LinkedHashMap customDefs; + private LinkedHashMap relTypes;//TYPE private NodeTemplate subMappedNodeTemplate; private ArrayList groups; private ArrayList policies; - private LinkedHashMap parsedParams = null;//TYPE + private LinkedHashMap parsedParams = null;//TYPE private String description; private ToscaGraph graph; private SubstitutionMappings substitutionMappings; - private boolean resolveGetInput; - - public TopologyTemplate( - LinkedHashMap _template, - LinkedHashMap _customDefs, - LinkedHashMap _relTypes,//TYPE + private boolean resolveGetInput; + + public TopologyTemplate( + LinkedHashMap _template, + LinkedHashMap _customDefs, + LinkedHashMap _relTypes,//TYPE LinkedHashMap _parsedParams, - NodeTemplate _subMappedNodeTemplate, - boolean _resolveGetInput) { - - tpl = _template; - if(tpl != null) { - subMappedNodeTemplate = _subMappedNodeTemplate; - metaData = _metaData(); - customDefs = _customDefs; - relTypes = _relTypes; - parsedParams = _parsedParams; - resolveGetInput = _resolveGetInput; - _validateField(); - description = _tplDescription(); - inputs = _inputs(); - relationshipTemplates =_relationshipTemplates(); - //todo: pass subMappedNodeTemplate to ET constractor - nodeTemplates = _nodeTemplates(); - outputs = _outputs(); - if(nodeTemplates != null) { - graph = new ToscaGraph(nodeTemplates); - } - groups = _groups(); - policies = _policies(); - _processIntrinsicFunctions(); - substitutionMappings = _substitutionMappings(); - } - } - - 
@SuppressWarnings("unchecked") - private ArrayList _inputs() { - //DumpUtils.dumpYaml(customDefs,0); - ArrayList alInputs = new ArrayList<>(); - for(String name: _tplInputs().keySet()) { - Object attrs = _tplInputs().get(name); - Input input = new Input(name,(LinkedHashMap)attrs,customDefs); - if(parsedParams != null && parsedParams.get(name) != null) { - input.validate(parsedParams.get(name)); + NodeTemplate _subMappedNodeTemplate, + boolean _resolveGetInput) { + + tpl = _template; + if (tpl != null) { + subMappedNodeTemplate = _subMappedNodeTemplate; + metaData = _metaData(); + customDefs = _customDefs; + relTypes = _relTypes; + parsedParams = _parsedParams; + resolveGetInput = _resolveGetInput; + _validateField(); + description = _tplDescription(); + inputs = _inputs(); + relationshipTemplates = _relationshipTemplates(); + //todo: pass subMappedNodeTemplate to ET constractor + nodeTemplates = _nodeTemplates(); + outputs = _outputs(); + if (nodeTemplates != null) { + graph = new ToscaGraph(nodeTemplates); } - else { + groups = _groups(); + policies = _policies(); + _processIntrinsicFunctions(); + substitutionMappings = _substitutionMappings(); + } + } + + @SuppressWarnings("unchecked") + private ArrayList _inputs() { + ArrayList alInputs = new ArrayList<>(); + for (String name : _tplInputs().keySet()) { + Object attrs = _tplInputs().get(name); + Input input = new Input(name, (LinkedHashMap) attrs, customDefs); + if (parsedParams != null && parsedParams.get(name) != null) { + input.validate(parsedParams.get(name)); + } else { Object _default = input.getDefault(); - if(_default != null) { + if (_default != null) { input.validate(_default); } } - if((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) - && input.isRequired() && input.getDefault() == null) { - System.out.format("Log warning: The required parameter \"%s\" is not provided\n",input.getName()); + if ((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) + && input.isRequired() && input.getDefault() == null) { + System.out.format("Log warning: The required parameter \"%s\" is not provided\n", input.getName()); } alInputs.add(input); - } + } return alInputs; - - } - private LinkedHashMap _metaData() { - if(tpl.get(METADATA) != null) { - return (LinkedHashMap)tpl.get(METADATA); + } + + private LinkedHashMap _metaData() { + if (tpl.get(METADATA) != null) { + return (LinkedHashMap) tpl.get(METADATA); + } else { + return new LinkedHashMap(); + } + + } + + private ArrayList _nodeTemplates() { + ArrayList alNodeTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplNodeTemplates(); + if (tpls != null) { + for (String name : tpls.keySet()) { + NodeTemplate tpl = new NodeTemplate(name, + tpls, + customDefs, + relationshipTemplates, + relTypes, + subMappedNodeTemplate); + if (tpl.getTypeDefinition() != null) { + boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; + if (b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { + tpl.validate(); + alNodeTemplates.add(tpl); + } + } + } + } + return alNodeTemplates; + } + + @SuppressWarnings("unchecked") + private ArrayList _relationshipTemplates() { + ArrayList alRelationshipTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplRelationshipTemplates(); + if (tpls != null) { + for (String name : tpls.keySet()) { + RelationshipTemplate tpl = new RelationshipTemplate( + (LinkedHashMap) tpls.get(name), name, customDefs, null, null, subMappedNodeTemplate); + + alRelationshipTemplates.add(tpl); + 
} } - else { - return new LinkedHashMap(); + return alRelationshipTemplates; + } + + private ArrayList _outputs() { + ArrayList alOutputs = new ArrayList<>(); + for (Map.Entry me : _tplOutputs().entrySet()) { + String oname = me.getKey(); + LinkedHashMap oattrs = (LinkedHashMap) me.getValue(); + Output o = new Output(oname, oattrs); + o.validate(); + alOutputs.add(o); + } + return alOutputs; + } + + private SubstitutionMappings _substitutionMappings() { + LinkedHashMap tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); + + //*** the commenting-out below and the weaker condition are in the Python source + // #if tpl_substitution_mapping and self.sub_mapped_node_template: + if (tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { + return new SubstitutionMappings(tplSubstitutionMapping, + nodeTemplates, + inputs, + outputs, + groups, + subMappedNodeTemplate, + customDefs); + } + return null; + + } + + @SuppressWarnings("unchecked") + private ArrayList _policies() { + ArrayList alPolicies = new ArrayList<>(); + for (Map.Entry me : _tplPolicies().entrySet()) { + String policyName = me.getKey(); + LinkedHashMap policyTpl = (LinkedHashMap) me.getValue(); + ArrayList targetList = (ArrayList) policyTpl.get("targets"); + ArrayList targetNodes = new ArrayList<>(); + ArrayList targetObjects = new ArrayList<>(); + ArrayList targetGroups = new ArrayList<>(); + String targetsType = "groups"; + if (targetList != null && targetList.size() >= 1) { + targetGroups = _getPolicyGroups(targetList); + if (targetGroups == null || targetGroups.isEmpty()) { + targetsType = "node_templates"; + targetNodes = _getGroupMembers(targetList); + for (NodeTemplate nt : targetNodes) { + targetObjects.add(nt); + } + } else { + for (Group gr : targetGroups) { + targetObjects.add(gr); + } + } + } + Policy policyObj = new Policy(policyName, + policyTpl, + targetObjects, + targetsType, + customDefs, + subMappedNodeTemplate); + alPolicies.add(policyObj); } - - } - - private ArrayList _nodeTemplates() { - ArrayList alNodeTemplates = new ArrayList<>(); - LinkedHashMap tpls = _tplNodeTemplates(); - if(tpls != null) { - for(String name: tpls.keySet()) { - NodeTemplate tpl = new NodeTemplate(name, - tpls, - customDefs, - relationshipTemplates, - relTypes, - subMappedNodeTemplate); - if(tpl.getTypeDefinition() != null) { - boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; - if(b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { - tpl.validate(); - alNodeTemplates.add(tpl); - } - } - } - } - return alNodeTemplates; - } - - @SuppressWarnings("unchecked") - private ArrayList _relationshipTemplates() { - ArrayList alRelationshipTemplates = new ArrayList<>(); - LinkedHashMap tpls = _tplRelationshipTemplates(); - if(tpls != null) { - for(String name: tpls.keySet()) { - RelationshipTemplate tpl = new RelationshipTemplate( - (LinkedHashMap)tpls.get(name),name,customDefs,null,null, subMappedNodeTemplate); - - alRelationshipTemplates.add(tpl); - } - } - return alRelationshipTemplates; - } - - private ArrayList _outputs() { - ArrayList alOutputs = new ArrayList<>(); - for(Map.Entry me: _tplOutputs().entrySet()) { - String oname = me.getKey(); - LinkedHashMap oattrs = (LinkedHashMap)me.getValue(); - Output o = new Output(oname,oattrs); - o.validate(); - alOutputs.add(o); - } - return alOutputs; - } - - private SubstitutionMappings _substitutionMappings() { - LinkedHashMap tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); - - //*** the commenting-out below and the 
weaker condition are in the Python source - // #if tpl_substitution_mapping and self.sub_mapped_node_template: - if(tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { - return new SubstitutionMappings(tplSubstitutionMapping, - nodeTemplates, - inputs, - outputs, - groups, - subMappedNodeTemplate, - customDefs); - } - return null; - - } - - @SuppressWarnings("unchecked") - private ArrayList _policies() { - ArrayList alPolicies = new ArrayList<>(); - for(Map.Entry me: _tplPolicies().entrySet()) { - String policyName = me.getKey(); - LinkedHashMap policyTpl = (LinkedHashMap)me.getValue(); - ArrayList targetList = (ArrayList)policyTpl.get("targets"); - ArrayList targetNodes = new ArrayList<>(); - ArrayList targetObjects = new ArrayList<>(); - ArrayList targetGroups = new ArrayList<>(); - String targetsType = "groups"; - if(targetList != null && targetList.size() >= 1) { - targetGroups = _getPolicyGroups(targetList); - if(targetGroups == null || targetGroups.isEmpty()) { - targetsType = "node_templates"; - targetNodes = _getGroupMembers(targetList); - for(NodeTemplate nt: targetNodes) { - targetObjects.add(nt); - } - } - else { - for(Group gr: targetGroups) { - targetObjects.add(gr); - } - } - } - Policy policyObj = new Policy(policyName, - policyTpl, - targetObjects, - targetsType, - customDefs, - subMappedNodeTemplate); - alPolicies.add(policyObj); - } return alPolicies; - } - - private ArrayList _groups() { - ArrayList groups = new ArrayList<>(); - ArrayList memberNodes = null; - for(Map.Entry me: _tplGroups().entrySet()) { - String groupName = me.getKey(); - LinkedHashMap groupTpl = (LinkedHashMap)me.getValue(); - ArrayList memberNames = (ArrayList)groupTpl.get("members"); - if(memberNames != null) { - DataEntity.validateDatatype("list", memberNames,null,null,null); - if(memberNames.size() < 1 || - (new HashSet(memberNames)).size() != memberNames.size()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005",String.format( + } + + private ArrayList _groups() { + ArrayList groups = new ArrayList<>(); + ArrayList memberNodes = null; + for (Map.Entry me : _tplGroups().entrySet()) { + String groupName = me.getKey(); + LinkedHashMap groupTpl = (LinkedHashMap) me.getValue(); + ArrayList memberNames = (ArrayList) groupTpl.get("members"); + if (memberNames != null) { + DataEntity.validateDatatype("list", memberNames, null, null, null); + if (memberNames.size() < 1 || + (new HashSet(memberNames)).size() != memberNames.size()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005", String.format( "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated", memberNames.toString()))); - } - else { - memberNodes = _getGroupMembers(memberNames); - } - } + } else { + memberNodes = _getGroupMembers(memberNames); + } + } Group group = new Group(groupName, - groupTpl, - memberNodes, - customDefs, subMappedNodeTemplate); + groupTpl, + memberNodes, + customDefs, subMappedNodeTemplate); groups.add(group); - } - return groups; - } - - private ArrayList _getGroupMembers(ArrayList memberNames) { - ArrayList memberNodes = new ArrayList<>(); - _validateGroupMembers(memberNames); - for(String member: memberNames) { - for(NodeTemplate node: nodeTemplates) { - if(member.equals(node.getName())) { - memberNodes.add(node); - } - } - } - return memberNodes; - } - - private ArrayList _getPolicyGroups(ArrayList memberNames) { - ArrayList memberGroups = new ArrayList<>(); - for(String member: memberNames) { 
- for(Group group: groups) { - if(member.equals(group.getName())) { - memberGroups.add(group); - } - } - } - return memberGroups; - } - - private void _validateGroupMembers(ArrayList members) { - ArrayList nodeNames = new ArrayList<>(); - for(NodeTemplate node: nodeTemplates) { - nodeNames.add(node.getName()); - } - for(String member: members) { - if(!nodeNames.contains(member)) { + } + return groups; + } + + private ArrayList _getGroupMembers(ArrayList memberNames) { + ArrayList memberNodes = new ArrayList<>(); + _validateGroupMembers(memberNames); + for (String member : memberNames) { + for (NodeTemplate node : nodeTemplates) { + if (member.equals(node.getName())) { + memberNodes.add(node); + } + } + } + return memberNodes; + } + + private ArrayList _getPolicyGroups(ArrayList memberNames) { + ArrayList memberGroups = new ArrayList<>(); + for (String member : memberNames) { + for (Group group : groups) { + if (member.equals(group.getName())) { + memberGroups.add(group); + } + } + } + return memberGroups; + } + + private void _validateGroupMembers(ArrayList members) { + ArrayList nodeNames = new ArrayList<>(); + for (NodeTemplate node : nodeTemplates) { + nodeNames.add(node.getName()); + } + for (String member : members) { + if (!nodeNames.contains(member)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE239", String.format( - "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member))); - } - } - } - - // topology template can act like node template - // it is exposed by substitution_mappings. - - public String nodetype() { - return substitutionMappings.getNodeType(); - } - - public LinkedHashMap capabilities() { - return substitutionMappings.getCapabilities(); - } - - public LinkedHashMap requirements() { - return substitutionMappings.getRequirements(); - } - - private String _tplDescription() { - return (String)tpl.get(DESCRIPTION); + "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"", member))); + } + } + } + + // topology template can act like node template + // it is exposed by substitution_mappings. 
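The target resolution in _policies() above follows a fixed order: group names are tried first, and the targets are treated as node template names only when no group matches. A standalone restatement of that order, using plain strings instead of the parser's Group and NodeTemplate objects; the class name is illustrative.

import java.util.List;
import java.util.Set;

// Hedged sketch of the target-resolution order used by _policies(): try group
// names first; only if none match, treat the targets as node template names.
final class PolicyTargetResolution {

    static String resolveTargetsType(List<String> targets, Set<String> groupNames) {
        if (targets == null || targets.isEmpty()) {
            return "groups"; // same default the parser starts from
        }
        for (String target : targets) {
            if (groupNames.contains(target)) {
                return "groups";
            }
        }
        return "node_templates";
    }
}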
+ + public String nodetype() { + return substitutionMappings.getNodeType(); + } + + public LinkedHashMap capabilities() { + return substitutionMappings.getCapabilities(); + } + + public LinkedHashMap requirements() { + return substitutionMappings.getRequirements(); + } + + private String _tplDescription() { + return (String) tpl.get(DESCRIPTION); //if description: // return description.rstrip() - } + } - @SuppressWarnings("unchecked") - private LinkedHashMap _tplInputs() { - if(tpl.get(INPUTS) != null) { - return (LinkedHashMap)tpl.get(INPUTS); + @SuppressWarnings("unchecked") + private LinkedHashMap _tplInputs() { + if (tpl.get(INPUTS) != null) { + return (LinkedHashMap) tpl.get(INPUTS); } - return new LinkedHashMap(); + return new LinkedHashMap(); } @SuppressWarnings("unchecked") - private LinkedHashMap _tplNodeTemplates() { - return (LinkedHashMap)tpl.get(NODE_TEMPLATES); + private LinkedHashMap _tplNodeTemplates() { + return (LinkedHashMap) tpl.get(NODE_TEMPLATES); } @SuppressWarnings("unchecked") - private LinkedHashMap _tplRelationshipTemplates() { - if(tpl.get(RELATIONSHIP_TEMPLATES) != null) { - return (LinkedHashMap)tpl.get(RELATIONSHIP_TEMPLATES); + private LinkedHashMap _tplRelationshipTemplates() { + if (tpl.get(RELATIONSHIP_TEMPLATES) != null) { + return (LinkedHashMap) tpl.get(RELATIONSHIP_TEMPLATES); } - return new LinkedHashMap(); + return new LinkedHashMap(); } @SuppressWarnings("unchecked") - private LinkedHashMap _tplOutputs() { - if(tpl.get(OUTPUTS) != null) { - return (LinkedHashMap)tpl.get(OUTPUTS); - } - return new LinkedHashMap(); - } + private LinkedHashMap _tplOutputs() { + if (tpl.get(OUTPUTS) != null) { + return (LinkedHashMap) tpl.get(OUTPUTS); + } + return new LinkedHashMap(); + } @SuppressWarnings("unchecked") - private LinkedHashMap _tplSubstitutionMappings() { - if(tpl.get(SUBSTITUTION_MAPPINGS) != null) { - return (LinkedHashMap)tpl.get(SUBSTITUTION_MAPPINGS); + private LinkedHashMap _tplSubstitutionMappings() { + if (tpl.get(SUBSTITUTION_MAPPINGS) != null) { + return (LinkedHashMap) tpl.get(SUBSTITUTION_MAPPINGS); } - return new LinkedHashMap(); + return new LinkedHashMap(); } @SuppressWarnings("unchecked") - private LinkedHashMap _tplGroups() { - if(tpl.get(GROUPS) != null) { - return (LinkedHashMap)tpl.get(GROUPS); + private LinkedHashMap _tplGroups() { + if (tpl.get(GROUPS) != null) { + return (LinkedHashMap) tpl.get(GROUPS); } - return new LinkedHashMap(); + return new LinkedHashMap(); } @SuppressWarnings("unchecked") - private LinkedHashMap _tplPolicies() { - if(tpl.get(POLICIES) != null) { - return (LinkedHashMap)tpl.get(POLICIES); + private LinkedHashMap _tplPolicies() { + if (tpl.get(POLICIES) != null) { + return (LinkedHashMap) tpl.get(POLICIES); } - return new LinkedHashMap<>(); + return new LinkedHashMap<>(); } private void _validateField() { - for(String name: tpl.keySet()) { - boolean bFound = false; - for(String section: SECTIONS) { - if(name.equals(section)) { - bFound = true; - break; - } - } - if(!bFound) { + for (String name : tpl.keySet()) { + boolean bFound = false; + for (String section : SECTIONS) { + if (name.equals(section)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE240", String.format( - "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name))); - } - } + "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"", name))); + } + } } @SuppressWarnings("unchecked") - private void _processIntrinsicFunctions() { + 
private void _processIntrinsicFunctions() { // Process intrinsic functions // Current implementation processes functions within node template // properties, requirements, interfaces inputs and template outputs. - - if(nodeTemplates != null) { - for(NodeTemplate nt: nodeTemplates) { - for(Property prop: nt.getPropertiesObjects()) { - prop.setValue(Function.getFunction(this,nt,prop.getValue(), resolveGetInput)); - } - for(InterfacesDef ifd: nt.getInterfaces()) { - LinkedHashMap ifin = ifd.getInputs(); - if(ifin != null) { - for(Map.Entry me: ifin.entrySet()) { - String name = me.getKey(); - Object value = Function.getFunction(this,nt,me.getValue(), resolveGetInput); - ifd.setInput(name,value); - } - } - } - if(nt.getRequirements() != null) { - for(RequirementAssignment req: nt.getRequirements().getAll()) { - LinkedHashMap rel; - Object t = req.getRelationship(); - // it can be a string or a LHM... - if(t instanceof LinkedHashMap) { - rel = (LinkedHashMap)t; - } - else { - // we set it to null to fail the next test - // and avoid the get("proprties") - rel = null; - } - - if(rel != null && rel.get("properties") != null) { - LinkedHashMap relprops = - (LinkedHashMap)rel.get("properties"); - for(String key: relprops.keySet()) { - Object value = relprops.get(key); - Object func = Function.getFunction(this,req,value, resolveGetInput); - relprops.put(key,func); - } - } - } - } - if(nt.getCapabilitiesObjects() != null) { - for(CapabilityAssignment cap: nt.getCapabilitiesObjects()) { - if(cap.getPropertiesObjects() != null) { - for(Property prop: cap.getPropertiesObjects()) { - Object propvalue = Function.getFunction(this,nt,prop.getValue(), resolveGetInput); - if(propvalue instanceof GetInput) { - propvalue = ((GetInput)propvalue).result(); - for(String p: cap.getProperties().keySet()) { - //Object v = cap.getProperties().get(p); - if(p.equals(prop.getName())) { - cap.setProperty(p,propvalue); - } - } - } - } - } - } - } - for(RelationshipType rel: nt.getRelationships().keySet()) { - NodeTemplate node = nt.getRelationships().get(rel); - ArrayList relTpls = node.getRelationshipTemplate(); - if(relTpls != null) { - for(RelationshipTemplate relTpl: relTpls) { - // TT 5 - for(InterfacesDef iface: relTpl.getInterfaces()) { - if(iface.getInputs() != null) { - for(String name: iface.getInputs().keySet()) { - Object value = iface.getInputs().get(name); - Object func = Function.getFunction( - this, - relTpl, - value, - resolveGetInput); - iface.setInput(name,func); - } - } - } - } - } - } - } - } - for(Output output: outputs) { - Object func = Function.getFunction(this,outputs,output.getValue(), resolveGetInput); - if(func instanceof GetAttribute) { - output.setAttr(Output.VALUE,func); - } - } - } - - public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { - if(topologyTpl != null && topologyTpl instanceof LinkedHashMap) { - Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS); - return SubstitutionMappings.stGetNodeType((LinkedHashMap)submapTpl); - } - return null; - } - + + if (nodeTemplates != null) { + for (NodeTemplate nt : nodeTemplates) { + for (Property prop : nt.getPropertiesObjects()) { + prop.setValue(Function.getFunction(this, nt, prop.getValue(), resolveGetInput)); + } + for (InterfacesDef ifd : nt.getInterfaces()) { + LinkedHashMap ifin = ifd.getInputs(); + if (ifin != null) { + for (Map.Entry me : ifin.entrySet()) { + String name = me.getKey(); + Object value = Function.getFunction(this, nt, me.getValue(), resolveGetInput); + ifd.setInput(name, value); + } + } + } + if 
(nt.getRequirements() != null) { + for (RequirementAssignment req : nt.getRequirements().getAll()) { + LinkedHashMap rel; + Object t = req.getRelationship(); + // it can be a string or a LHM... + if (t instanceof LinkedHashMap) { + rel = (LinkedHashMap) t; + } else { + // we set it to null to fail the next test + // and avoid the get("proprties") + rel = null; + } + + if (rel != null && rel.get("properties") != null) { + LinkedHashMap relprops = + (LinkedHashMap) rel.get("properties"); + for (String key : relprops.keySet()) { + Object value = relprops.get(key); + Object func = Function.getFunction(this, req, value, resolveGetInput); + relprops.put(key, func); + } + } + } + } + if (nt.getCapabilitiesObjects() != null) { + for (CapabilityAssignment cap : nt.getCapabilitiesObjects()) { + if (cap.getPropertiesObjects() != null) { + for (Property prop : cap.getPropertiesObjects()) { + Object propvalue = Function.getFunction(this, nt, prop.getValue(), resolveGetInput); + if (propvalue instanceof GetInput) { + propvalue = ((GetInput) propvalue).result(); + for (String p : cap.getProperties().keySet()) { + //Object v = cap.getProperties().get(p); + if (p.equals(prop.getName())) { + cap.setProperty(p, propvalue); + } + } + } + } + } + } + } + for (RelationshipType rel : nt.getRelationships().keySet()) { + NodeTemplate node = nt.getRelationships().get(rel); + ArrayList relTpls = node.getRelationshipTemplate(); + if (relTpls != null) { + for (RelationshipTemplate relTpl : relTpls) { + // TT 5 + for (InterfacesDef iface : relTpl.getInterfaces()) { + if (iface.getInputs() != null) { + for (String name : iface.getInputs().keySet()) { + Object value = iface.getInputs().get(name); + Object func = Function.getFunction( + this, + relTpl, + value, + resolveGetInput); + iface.setInput(name, func); + } + } + } + } + } + } + } + } + for (Output output : outputs) { + Object func = Function.getFunction(this, outputs, output.getValue(), resolveGetInput); + if (func instanceof GetAttribute) { + output.setAttr(Output.VALUE, func); + } + } + } + + public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { + if (topologyTpl != null && topologyTpl instanceof LinkedHashMap) { + Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS); + return SubstitutionMappings.stGetNodeType((LinkedHashMap) submapTpl); + } + return null; + } + // getters - - public LinkedHashMap getTpl() { - return tpl; - } - - public LinkedHashMap getMetadata() { - return metaData; - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getOutputs() { - return outputs; - } - - public ArrayList getPolicies() { - return policies; - } - - public ArrayList getRelationshipTemplates() { - return relationshipTemplates; - } - - public ArrayList getNodeTemplates() { - return nodeTemplates; - } - - public ArrayList getGroups() { - return groups; - } - - public SubstitutionMappings getSubstitutionMappings() { - return substitutionMappings; - } - - public LinkedHashMap getParsedParams() { - return parsedParams; - } - - public boolean getResolveGetInput() { - return resolveGetInput; - } - public LinkedHashMap getCustomDefs() { - return customDefs; - } + + public LinkedHashMap getTpl() { + return tpl; + } + + public LinkedHashMap getMetadata() { + return metaData; + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getRelationshipTemplates() { + return relationshipTemplates; + } + 
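At the YAML level, the intrinsic-function pass above amounts to walking nested maps and lists and replacing function markers with their resolved values. A simplified sketch covering only single-argument get_input maps; the real Function.getFunction handles the full set of TOSCA functions and reports errors through the issue collector, so this is an illustration, not the parser's implementation.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hedged sketch: resolve {get_input: <name>} markers in a parsed YAML structure
// by recursive descent, leaving every other value untouched.
final class GetInputResolutionSketch {

    @SuppressWarnings("unchecked")
    static Object resolve(Object node, Map<String, Object> inputValues) {
        if (node instanceof Map) {
            Map<String, Object> map = (Map<String, Object>) node;
            if (map.size() == 1 && map.containsKey("get_input")) {
                return inputValues.get(String.valueOf(map.get("get_input")));
            }
            Map<String, Object> out = new LinkedHashMap<>();
            for (Map.Entry<String, Object> e : map.entrySet()) {
                out.put(e.getKey(), resolve(e.getValue(), inputValues));
            }
            return out;
        }
        if (node instanceof List) {
            List<Object> list = (List<Object>) node;
            for (int i = 0; i < list.size(); i++) {
                list.set(i, resolve(list.get(i), inputValues));
            }
            return list;
        }
        return node;
    }
}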
+ public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public ArrayList getGroups() { + return groups; + } + + public SubstitutionMappings getSubstitutionMappings() { + return substitutionMappings; + } + + public LinkedHashMap getParsedParams() { + return parsedParams; + } + + public boolean getResolveGetInput() { + return resolveGetInput; + } + + public LinkedHashMap getCustomDefs() { + return customDefs; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java index 1799f2e..1706cdc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,75 +20,75 @@ package org.onap.sdc.toscaparser.api; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; + import java.util.ArrayList; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.elements.RelationshipType; - //import java.util.Iterator; public class ToscaGraph { // Graph of Tosca Node Templates - private ArrayList nodeTemplates; - private LinkedHashMap vertices; - - public ToscaGraph(ArrayList inodeTemplates) { - nodeTemplates = inodeTemplates; - vertices = new LinkedHashMap(); - _create(); - } - - private void _createVertex(NodeTemplate node) { - if(vertices.get(node.getName()) == null) { - vertices.put(node.getName(),node); + private ArrayList nodeTemplates; + private LinkedHashMap vertices; + + public ToscaGraph(ArrayList inodeTemplates) { + nodeTemplates = inodeTemplates; + vertices = new LinkedHashMap(); + create(); + } + + private void createVertex(NodeTemplate node) { + if (vertices.get(node.getName()) == null) { + vertices.put(node.getName(), node); } - } - - private void _createEdge(NodeTemplate node1, - NodeTemplate node2, - RelationshipType relation) { - if(vertices.get(node1.getName()) == null) { - _createVertex(node1); - vertices.get(node1.name)._addNext(node2,relation); - } - } - - public NodeTemplate vertex(String name) { - if(vertices.get(name) != null) { + } + + private void createEdge(NodeTemplate node1, + NodeTemplate node2, + RelationshipType relation) { + if (vertices.get(node1.getName()) == null) { + createVertex(node1); + vertices.get(node1.name)._addNext(node2, relation); + } + } + + public NodeTemplate vertex(String name) { + if (vertices.get(name) != null) { return vertices.get(name); } return null; - } - -// public Iterator getIter() { + } + +// public Iterator getIter() { // return vertices.values().iterator(); // } - - private void _create() { - for(NodeTemplate node: nodeTemplates) { - LinkedHashMap relation = node.getRelationships(); - if(relation != null) { - for(RelationshipType rel: relation.keySet()) { - NodeTemplate nodeTpls = relation.get(rel); - for(NodeTemplate tpl: nodeTemplates) { - if(tpl.getName().equals(nodeTpls.getName())) { - _createEdge(node,tpl,rel); - } - } - } - } - _createVertex(node); - } - } - - @Override - public String toString() { - return "ToscaGraph{" + - "nodeTemplates=" + nodeTemplates + - ", vertices=" + 
vertices + - '}'; - } + + private void create() { + for (NodeTemplate node : nodeTemplates) { + LinkedHashMap relation = node.getRelationships(); + if (relation != null) { + for (RelationshipType rel : relation.keySet()) { + NodeTemplate nodeTpls = relation.get(rel); + for (NodeTemplate tpl : nodeTemplates) { + if (tpl.getName().equals(nodeTpls.getName())) { + createEdge(node, tpl, rel); + } + } + } + } + createVertex(node); + } + } + + @Override + public String toString() { + return "ToscaGraph{" + + "nodeTemplates=" + nodeTemplates + + ", vertices=" + vertices + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java index 6edc291..ddb8ddb 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -5,9 +5,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -55,58 +55,58 @@ import org.yaml.snakeyaml.Yaml; public class ToscaTemplate extends Object { - public static final int MAX_LEVELS = 20; - private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); - - // TOSCA template key names - private static final String DEFINITION_VERSION = "tosca_definitions_version"; - private static final String DEFAULT_NAMESPACE = "tosca_default_namespace"; - private static final String TEMPLATE_NAME = "template_name"; - private static final String TOPOLOGY_TEMPLATE = "topology_template"; - private static final String TEMPLATE_AUTHOR = "template_author"; - private static final String TEMPLATE_VERSION = "template_version"; - private static final String DESCRIPTION = "description"; - private static final String IMPORTS = "imports"; - private static final String DSL_DEFINITIONS = "dsl_definitions"; - private static final String NODE_TYPES = "node_types"; - private static final String RELATIONSHIP_TYPES = "relationship_types"; - private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; - private static final String CAPABILITY_TYPES = "capability_types"; - private static final String ARTIFACT_TYPES = "artifact_types"; - private static final String DATA_TYPES = "data_types"; - private static final String INTERFACE_TYPES = "interface_types"; - private static final String POLICY_TYPES = "policy_types"; - private static final String GROUP_TYPES = "group_types"; - private static final String REPOSITORIES = "repositories"; - - private static String SECTIONS[] = { - DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, + public static final int MAX_LEVELS = 20; + private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); + + // TOSCA template key names + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DEFAULT_NAMESPACE = "tosca_default_namespace"; + private static final String TEMPLATE_NAME = "template_name"; + private static final String TOPOLOGY_TEMPLATE = "topology_template"; + private static final String TEMPLATE_AUTHOR = "template_author"; + private static final String TEMPLATE_VERSION = 
"template_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String DATA_TYPES = "data_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private static final String GROUP_TYPES = "group_types"; + private static final String REPOSITORIES = "repositories"; + + private static String SECTIONS[] = { + DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES, REPOSITORIES - }; + }; + + // Sections that are specific to individual template definitions + private static final String METADATA = "metadata"; + private static ArrayList SPECIAL_SECTIONS; - // Sections that are specific to individual template definitions - private static final String METADATA = "metadata"; - private static ArrayList SPECIAL_SECTIONS; - private ExtTools exttools = new ExtTools(); private ArrayList VALID_TEMPLATE_VERSIONS; - private LinkedHashMap> ADDITIONAL_SECTIONS; - - private boolean isFile; - private String path; - private String inputPath; - private String rootPath; - private LinkedHashMap parsedParams; - private boolean resolveGetInput; - private LinkedHashMap tpl; + private LinkedHashMap> ADDITIONAL_SECTIONS; + + private boolean isFile; + private String path; + private String inputPath; + private String rootPath; + private LinkedHashMap parsedParams; + private boolean resolveGetInput; + private LinkedHashMap tpl; private String version; private ArrayList imports; - private LinkedHashMap relationshipTypes; + private LinkedHashMap relationshipTypes; private Metadata metaData; private String description; private TopologyTemplate topologyTemplate; @@ -115,117 +115,113 @@ public class ToscaTemplate extends Object { private ArrayList relationshipTemplates; private ArrayList nodeTemplates; private ArrayList outputs; - private ArrayList policies; - private ArrayList groups; - private ConcurrentHashMap nestedToscaTplsWithTopology; + private ArrayList policies; + private ArrayList groups; + private ConcurrentHashMap nestedToscaTplsWithTopology; private ArrayList nestedToscaTemplatesWithTopology; private ToscaGraph graph; private String csarTempDir; private int nestingLoopCounter; - private LinkedHashMap> metaProperties; - private Set processedImports; - private LinkedHashMap customDefsFinal = new LinkedHashMap<>(); - private HashSet dataTypes; - - public ToscaTemplate(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl) throws JToscaException { - init(_path, _parsedParams, aFile, yamlDictTpl, true); - } - - public ToscaTemplate(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl, boolean resolveGetInput) throws JToscaException { - init(_path, _parsedParams, aFile, yamlDictTpl, resolveGetInput); - } - - @SuppressWarnings("unchecked") - 
private void init(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws JToscaException { - - ThreadLocalsHolder.setCollector(new ValidationIssueCollector()); - - VALID_TEMPLATE_VERSIONS = new ArrayList<>(); - VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); - VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1"); - VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); - ADDITIONAL_SECTIONS = new LinkedHashMap<>(); - SPECIAL_SECTIONS = new ArrayList<>(); - SPECIAL_SECTIONS.add(METADATA); - ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0",SPECIAL_SECTIONS); - ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1",SPECIAL_SECTIONS); - ADDITIONAL_SECTIONS.putAll(exttools.getSections()); - - //long startTime = System.nanoTime(); - - - isFile = aFile; - inputPath = null; - path = null; - tpl = null; - csarTempDir = null; - nestedToscaTplsWithTopology = new ConcurrentHashMap<>(); - nestedToscaTemplatesWithTopology = new ArrayList(); - resolveGetInput = _resolveGetInput; - metaProperties = new LinkedHashMap<>(); - - if(_path != null && !_path.isEmpty()) { - // save the original input path - inputPath = _path; - // get the actual path (will change with CSAR) - path = _getPath(_path); - // load the YAML template - if (path != null && !path.isEmpty()) { - try (InputStream input = new FileInputStream(new File(path));){ - //System.out.println("Loading YAML file " + path); - log.debug("ToscaTemplate Loading YAMEL file {}", path); - Yaml yaml = new Yaml(); - Object data = yaml.load(input); - this.tpl = (LinkedHashMap) data; - } - catch (FileNotFoundException e) { - log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); - log.error("Exception", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", - "ToscaTemplate - Exception loading yaml: -> " + e.getMessage())); - return; - } - catch(Exception e) { - log.error("ToscaTemplate - Error loading yaml, aborting -> ", e.getMessage()); - log.error("Exception", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", - "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage())); - return; - } - - if(yamlDictTpl != null) { - //msg = (_('Both path and yaml_dict_tpl arguments were ' - // 'provided. Using path and ignoring yaml_dict_tpl.')) - //log.info(msg) - log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); - } - } else { - // no input to process... 
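For orientation, a hedged usage sketch of the two public constructors defined above; "service.yaml" is a placeholder path rather than a file shipped with the project, and the null arguments stand for absent parsed parameters and absent pre-loaded YAML.

import org.onap.sdc.toscaparser.api.ToscaTemplate;

// Hedged usage sketch of the two public constructors shown above. A nonexistent
// path is reported through the validation mechanism visible in init() rather
// than being silently ignored.
public class ToscaTemplateUsage {

    public static void main(String[] args) throws Exception {
        // File-based parsing with get_input resolution enabled (the 4-arg form).
        ToscaTemplate resolved = new ToscaTemplate("service.yaml", null, true, null);

        // The 5-arg form keeps get_input functions unresolved.
        ToscaTemplate unresolved = new ToscaTemplate("service.yaml", null, true, null, false);
    }
}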
- _abort(); - } - } - else { - if(yamlDictTpl != null) { + private LinkedHashMap> metaProperties; + private Set processedImports; + private LinkedHashMap customDefsFinal = new LinkedHashMap<>(); + private HashSet dataTypes; + + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl) throws JToscaException { + init(_path, _parsedParams, aFile, yamlDictTpl, true); + } + + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl, boolean resolveGetInput) throws JToscaException { + init(_path, _parsedParams, aFile, yamlDictTpl, resolveGetInput); + } + + @SuppressWarnings("unchecked") + private void init(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws JToscaException { + + ThreadLocalsHolder.setCollector(new ValidationIssueCollector()); + + VALID_TEMPLATE_VERSIONS = new ArrayList<>(); + VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); + VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1"); + VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); + ADDITIONAL_SECTIONS = new LinkedHashMap<>(); + SPECIAL_SECTIONS = new ArrayList<>(); + SPECIAL_SECTIONS.add(METADATA); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0", SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1", SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.putAll(exttools.getSections()); + + //long startTime = System.nanoTime(); + + + isFile = aFile; + inputPath = null; + path = null; + tpl = null; + csarTempDir = null; + nestedToscaTplsWithTopology = new ConcurrentHashMap<>(); + nestedToscaTemplatesWithTopology = new ArrayList(); + resolveGetInput = _resolveGetInput; + metaProperties = new LinkedHashMap<>(); + + if (_path != null && !_path.isEmpty()) { + // save the original input path + inputPath = _path; + // get the actual path (will change with CSAR) + path = _getPath(_path); + // load the YAML template + if (path != null && !path.isEmpty()) { + try (InputStream input = new FileInputStream(new File(path));) { + //System.out.println("Loading YAML file " + path); + log.debug("ToscaTemplate Loading YAMEL file {}", path); + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + this.tpl = (LinkedHashMap) data; + } catch (FileNotFoundException e) { + log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Exception loading yaml: -> " + e.getMessage())); + return; + } catch (Exception e) { + log.error("ToscaTemplate - Error loading yaml, aborting -> ", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage())); + return; + } + + if (yamlDictTpl != null) { + //msg = (_('Both path and yaml_dict_tpl arguments were ' + // 'provided. Using path and ignoring yaml_dict_tpl.')) + //log.info(msg) + log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); + } + } else { + // no input to process... + _abort(); + } + } else { + if (yamlDictTpl != null) { tpl = yamlDictTpl; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244", - "ValueError: No path or yaml_dict_tpl was provided. 
There is nothing to parse")); - log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244", + "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse")); + log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse"); - } - } + } + } - if(tpl != null) { + if (tpl != null) { parsedParams = _parsedParams; _validateField(); this.rootPath = path; @@ -235,10 +231,10 @@ public class ToscaTemplate extends Object { this.metaData = _tplMetaData(); this.relationshipTypes = _tplRelationshipTypes(); this.description = _tplDescription(); - this.dataTypes = getTopologyDataTypes(); - this.topologyTemplate = _topologyTemplate(); + this.dataTypes = getTopologyDataTypes(); + this.topologyTemplate = _topologyTemplate(); this.repositories = _tplRepositories(); - if(topologyTemplate.getTpl() != null) { + if (topologyTemplate.getTpl() != null) { this.inputs = _inputs(); this.relationshipTemplates = _relationshipTemplates(); this.nodeTemplates = _nodeTemplates(); @@ -246,422 +242,419 @@ public class ToscaTemplate extends Object { this.policies = _policies(); this.groups = _groups(); // _handleNestedToscaTemplatesWithTopology(); - _handleNestedToscaTemplatesWithTopology(topologyTemplate); + _handleNestedToscaTemplatesWithTopology(topologyTemplate); graph = new ToscaGraph(nodeTemplates); } } - if(csarTempDir != null) { - CSAR.deleteDir(new File(csarTempDir)); - csarTempDir = null; + if (csarTempDir != null) { + CSAR.deleteDir(new File(csarTempDir)); + csarTempDir = null; + } + + verifyTemplate(); + + } + + private void _abort() throws JToscaException { + // print out all exceptions caught + verifyTemplate(); + throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); + } + + private TopologyTemplate _topologyTemplate() { + return new TopologyTemplate( + _tplTopologyTemplate(), + _getAllCustomDefs(imports), + relationshipTypes, + parsedParams, + null, + resolveGetInput); + } + + private ArrayList _inputs() { + return topologyTemplate.getInputs(); + } + + private ArrayList _nodeTemplates() { + return topologyTemplate.getNodeTemplates(); + } + + private ArrayList _relationshipTemplates() { + return topologyTemplate.getRelationshipTemplates(); + } + + private ArrayList _outputs() { + return topologyTemplate.getOutputs(); + } + + private String _tplVersion() { + return (String) tpl.get(DEFINITION_VERSION); + } + + @SuppressWarnings("unchecked") + private Metadata _tplMetaData() { + Object mdo = tpl.get(METADATA); + if (mdo instanceof LinkedHashMap) { + return new Metadata((Map) mdo); + } else { + return null; + } + } + + private String _tplDescription() { + return (String) tpl.get(DESCRIPTION); + } + + @SuppressWarnings("unchecked") + private ArrayList _tplImports() { + return (ArrayList) tpl.get(IMPORTS); + } + + @SuppressWarnings("unchecked") + private ArrayList _tplRepositories() { + LinkedHashMap repositories = + (LinkedHashMap) tpl.get(REPOSITORIES); + ArrayList reposit = new ArrayList<>(); + if (repositories != null) { + for (Map.Entry me : repositories.entrySet()) { + Repository reposits = new Repository(me.getKey(), me.getValue()); + reposit.add(reposits); + } + } + return reposit; + } + + private LinkedHashMap _tplRelationshipTypes() { + return (LinkedHashMap) _getCustomTypes(RELATIONSHIP_TYPES, null); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap 
_tplTopologyTemplate() { + return (LinkedHashMap) tpl.get(TOPOLOGY_TEMPLATE); + } + + private ArrayList _policies() { + return topologyTemplate.getPolicies(); + } + + private ArrayList _groups() { + return topologyTemplate.getGroups(); + } + + /** + * Read datatypes field + * + * @return return list of datatypes. + */ + @SuppressWarnings("unchecked") + private HashSet getTopologyDataTypes() { + LinkedHashMap value = + (LinkedHashMap) tpl.get(DATA_TYPES); + HashSet datatypes = new HashSet<>(); + if (value != null) { + customDefsFinal.putAll(value); + for (Map.Entry me : value.entrySet()) { + DataType datatype = new DataType(me.getKey(), value); + datatypes.add(datatype); + } } - - verifyTemplate(); - - } - - private void _abort() throws JToscaException { - // print out all exceptions caught - verifyTemplate(); - throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); - } - - private TopologyTemplate _topologyTemplate() { - return new TopologyTemplate( - _tplTopologyTemplate(), - _getAllCustomDefs(imports), - relationshipTypes, - parsedParams, - null, - resolveGetInput); - } - - private ArrayList _inputs() { - return topologyTemplate.getInputs(); - } - - private ArrayList _nodeTemplates() { - return topologyTemplate.getNodeTemplates(); - } - - private ArrayList _relationshipTemplates() { - return topologyTemplate.getRelationshipTemplates(); - } - - private ArrayList _outputs() { - return topologyTemplate.getOutputs(); - } - - private String _tplVersion() { - return (String)tpl.get(DEFINITION_VERSION); - } - - @SuppressWarnings("unchecked") - private Metadata _tplMetaData() { - Object mdo = tpl.get(METADATA); - if(mdo instanceof LinkedHashMap) { - return new Metadata((Map)mdo); - } - else { - return null; - } - } - - private String _tplDescription() { - return (String)tpl.get(DESCRIPTION); - } - - @SuppressWarnings("unchecked") - private ArrayList _tplImports() { - return (ArrayList)tpl.get(IMPORTS); - } - - @SuppressWarnings("unchecked") - private ArrayList _tplRepositories() { - LinkedHashMap repositories = - (LinkedHashMap)tpl.get(REPOSITORIES); - ArrayList reposit = new ArrayList<>(); - if(repositories != null) { - for(Map.Entry me: repositories.entrySet()) { - Repository reposits = new Repository(me.getKey(),me.getValue()); - reposit.add(reposits); - } - } - return reposit; - } - - private LinkedHashMap _tplRelationshipTypes() { - return (LinkedHashMap)_getCustomTypes(RELATIONSHIP_TYPES,null); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplTopologyTemplate() { - return (LinkedHashMap)tpl.get(TOPOLOGY_TEMPLATE); - } - - private ArrayList _policies() { - return topologyTemplate.getPolicies(); - } - - private ArrayList _groups() { - return topologyTemplate.getGroups(); - } - - /** - * Read datatypes field - * @return return list of datatypes. - */ - @SuppressWarnings("unchecked") - private HashSet getTopologyDataTypes(){ - LinkedHashMap value = - (LinkedHashMap)tpl.get(DATA_TYPES); - HashSet datatypes = new HashSet<>(); - if(value != null) { - customDefsFinal.putAll(value); - for(Map.Entry me: value.entrySet()) { - DataType datatype = new DataType(me.getKey(), value); - datatypes.add(datatype); - } - } - - - return datatypes; - } - - /** - * This method is used to get consolidated custom definitions from all imports - * It is logically divided in two parts to handle imports; map and list formats. - * Before processing the imports; it sorts them to make sure the current directory imports are - * being processed first and then others. 
Once sorted; it processes each import one by one in - * recursive manner. - * To avoid cyclic dependency among imports; this method uses a set to keep track of all - * imports which are already processed and filters the imports which occurs more than once. - * - * @param alImports all imports which needs to be processed - * @return the linked hash map containing all import definitions - */ - - @SuppressWarnings("unchecked") - private LinkedHashMap _getAllCustomDefs(Object alImports) { - - - String types[] = { - IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, - DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES - }; - - List> imports = (List>) alImports; - if (imports != null && !imports.isEmpty()) { - if (imports.get(0) instanceof LinkedHashMap) { - imports = sortImports(imports); - - for (Map map : imports) { - List> singleImportList = new ArrayList<>(); - singleImportList.add(map); - - Map importNameDetails = getValidFileNameForImportReference(singleImportList); - singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); - - if(!singleImportList.get(0).isEmpty()){ - LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); - processedImports.add(importNameDetails.get("importFileName")); - - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); - LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } - } - } - } else { - LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } - } - } - - // As imports are not custom_types, remove from the dict - customDefsFinal.remove(IMPORTS); - - return customDefsFinal; - } - - /** - * This method is used to sort the imports in order so that same directory - * imports will be processed first - * - * @param customImports the custom imports - * @return the sorted list of imports - */ - private List> sortImports(List> customImports){ - List> finalList1 = new ArrayList<>(); - List> finalList2 = new ArrayList<>(); - Iterator> itr = customImports.iterator(); - while(itr.hasNext()) { - Map innerMap = itr.next(); - if (innerMap.toString().contains("../")) { - finalList2.add(innerMap); - itr.remove(); - } - else if (innerMap.toString().contains("/")) { - finalList1.add(innerMap); - itr.remove(); - } - } - - customImports.addAll(finalList1); - customImports.addAll(finalList2); - return customImports; - } - - /** - * This method is used to reset PATH variable after processing of current import file is done - * This is required because of relative path nature of imports present in files. - * - * @param currImportRelativeName the current import relative name - */ - private void resetPathForRecursiveImports(String currImportRelativeName){ - path = getPath(path, currImportRelativeName); - } - - /** - * This is a recursive method which starts from current import and then recursively finds a - * valid path relative to current import file name. 
- * By doing this it handles all nested hierarchy of imports defined in CSARs - * - * @param path the path - * @param importFileName the import file name - * @return the string containing updated path value - */ - private String getPath(String path, String importFileName){ - String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() - .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); - String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); - if(Files.exists(Paths.get(tempFullPath))) - return tempFullPath; - else - return getPath(tempPartialPath, importFileName); - } - - /** - * This method is used to get full path name for the file which needs to be processed. It helps - * in situation where files are present in different directory and are references as relative - * paths. - * - * @param customImports the custom imports - * @return the map containing import file full and relative paths - */ - private Map getValidFileNameForImportReference(List> customImports){ - String importFileName; - Map retMap = new HashMap<>(); - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - Map.Entry val = it.next(); - if(val.getValue().contains("/")){ - importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } - else { - importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } - retMap.put("importFileName", importFileName); - retMap.put("importRelativeName", val.getValue()); - } - } - } - return retMap; - } - - /** - * This method is used to filter the imports which already gets processed in previous step. - * It handles the use case of cyclic dependency in imports which may cause Stack Overflow - * exception - * - * @param customImports the custom imports - * @param importNameDetails the import name details - * @return the list containing filtered imports - */ - private List> filterImportsForRecursion(List> - customImports, Map importNameDetails){ - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - it.next(); - if (processedImports.contains(importNameDetails.get("importFileName"))) { - it.remove(); - } - } - } - } - - // Remove Empty elements - Iterator> itr = customImports.iterator(); - while(itr.hasNext()) { - Map innerMap = itr.next(); - Predicate predicate = p-> p.values().isEmpty(); - innerMap.values().removeIf(predicate); - } - - return customImports; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getCustomTypes(Object typeDefinitions,ArrayList alImports) { - + + + return datatypes; + } + + /** + * This method is used to get consolidated custom definitions from all imports + * It is logically divided in two parts to handle imports; map and list formats. + * Before processing the imports; it sorts them to make sure the current directory imports are + * being processed first and then others. Once sorted; it processes each import one by one in + * recursive manner. 
+ * To avoid cyclic dependency among imports; this method uses a set to keep track of all + * imports which are already processed and filters the imports which occurs more than once. + * + * @param alImports all imports which needs to be processed + * @return the linked hash map containing all import definitions + */ + + @SuppressWarnings("unchecked") + private LinkedHashMap _getAllCustomDefs(Object alImports) { + + + String types[] = { + IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES + }; + + List> imports = (List>) alImports; + if (imports != null && !imports.isEmpty()) { + if (imports.get(0) instanceof LinkedHashMap) { + imports = sortImports(imports); + + for (Map map : imports) { + List> singleImportList = new ArrayList<>(); + singleImportList.add(map); + + Map importNameDetails = getValidFileNameForImportReference(singleImportList); + singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); + + if (!singleImportList.get(0).isEmpty()) { + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); + processedImports.add(importNameDetails.get("importFileName")); + + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + } else { + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + + // As imports are not custom_types, remove from the dict + customDefsFinal.remove(IMPORTS); + + return customDefsFinal; + } + + /** + * This method is used to sort the imports in order so that same directory + * imports will be processed first + * + * @param customImports the custom imports + * @return the sorted list of imports + */ + private List> sortImports(List> customImports) { + List> finalList1 = new ArrayList<>(); + List> finalList2 = new ArrayList<>(); + Iterator> itr = customImports.iterator(); + while (itr.hasNext()) { + Map innerMap = itr.next(); + if (innerMap.toString().contains("../")) { + finalList2.add(innerMap); + itr.remove(); + } else if (innerMap.toString().contains("/")) { + finalList1.add(innerMap); + itr.remove(); + } + } + + customImports.addAll(finalList1); + customImports.addAll(finalList2); + return customImports; + } + + /** + * This method is used to reset PATH variable after processing of current import file is done + * This is required because of relative path nature of imports present in files. + * + * @param currImportRelativeName the current import relative name + */ + private void resetPathForRecursiveImports(String currImportRelativeName) { + path = getPath(path, currImportRelativeName); + } + + /** + * This is a recursive method which starts from current import and then recursively finds a + * valid path relative to current import file name. 
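// Editorial note, not part of the patch: the sortImports method above only reorders the
// import list so that same-directory imports are consumed before "dir/file" imports, with
// "../" (parent-directory) imports last. A minimal standalone sketch of that ordering,
// using illustrative class and entry names (order() stands in for sortImports):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ImportOrderSketch {

    // Same-directory imports stay in front, "dir/file" imports come next,
    // "../" imports go last, mirroring the intent described in the javadoc above.
    static List<Map<String, Object>> order(List<Map<String, Object>> imports) {
        List<Map<String, Object>> withSlash = new ArrayList<>();
        List<Map<String, Object>> withParent = new ArrayList<>();
        Iterator<Map<String, Object>> it = imports.iterator();
        while (it.hasNext()) {
            Map<String, Object> im = it.next();
            if (im.toString().contains("../")) {
                withParent.add(im);
                it.remove();
            } else if (im.toString().contains("/")) {
                withSlash.add(im);
                it.remove();
            }
        }
        imports.addAll(withSlash);
        imports.addAll(withParent);
        return imports;
    }

    public static void main(String[] args) {
        List<Map<String, Object>> imports = new ArrayList<>();
        Map<String, Object> parentDir = new LinkedHashMap<>();
        parentDir.put("shared", "../shared/types.yaml");
        Map<String, Object> sameDir = new LinkedHashMap<>();
        sameDir.put("local", "local_types.yaml");
        Map<String, Object> subDir = new LinkedHashMap<>();
        subDir.put("nested", "nested/types.yaml");
        imports.add(parentDir);
        imports.add(sameDir);
        imports.add(subDir);
        // After ordering: local (same dir), nested (sub dir), shared (parent dir)
        System.out.println(order(imports));
    }
}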
+ * By doing this it handles all nested hierarchy of imports defined in CSARs + * + * @param path the path + * @param importFileName the import file name + * @return the string containing updated path value + */ + private String getPath(String path, String importFileName) { + String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() + .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); + String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); + if (Files.exists(Paths.get(tempFullPath))) + return tempFullPath; + else + return getPath(tempPartialPath, importFileName); + } + + /** + * This method is used to get full path name for the file which needs to be processed. It helps + * in situation where files are present in different directory and are references as relative + * paths. + * + * @param customImports the custom imports + * @return the map containing import file full and relative paths + */ + private Map getValidFileNameForImportReference(List> customImports) { + String importFileName; + Map retMap = new HashMap<>(); + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + Map.Entry val = it.next(); + if (val.getValue().contains("/")) { + importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } else { + importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } + retMap.put("importFileName", importFileName); + retMap.put("importRelativeName", val.getValue()); + } + } + } + return retMap; + } + + /** + * This method is used to filter the imports which already gets processed in previous step. + * It handles the use case of cyclic dependency in imports which may cause Stack Overflow + * exception + * + * @param customImports the custom imports + * @param importNameDetails the import name details + * @return the list containing filtered imports + */ + private List> filterImportsForRecursion(List> + customImports, Map importNameDetails) { + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + it.next(); + if (processedImports.contains(importNameDetails.get("importFileName"))) { + it.remove(); + } + } + } + } + + // Remove Empty elements + Iterator> itr = customImports.iterator(); + while (itr.hasNext()) { + Map innerMap = itr.next(); + Predicate predicate = p -> p.values().isEmpty(); + innerMap.values().removeIf(predicate); + } + + return customImports; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getCustomTypes(Object typeDefinitions, ArrayList alImports) { + // Handle custom types defined in imported template files // This method loads the custom type definitions referenced in "imports" // section of the TOSCA YAML template. 
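// Editorial note, not part of the patch: filterImportsForRecursion above breaks import
// cycles by consulting the processedImports set that _getAllCustomDefs fills in as it
// recurses. A minimal standalone sketch of that guard, with an illustrative class name
// and a toy import graph instead of real TOSCA imports:

import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class ImportCycleGuardSketch {

    private final Set<String> processedImports = new HashSet<>();

    // Returns true only the first time a given import file is seen, so a recursive
    // import walk terminates even when A imports B and B imports A again.
    boolean markVisited(String importFileName) {
        return processedImports.add(importFileName);
    }

    void walk(String importFileName, Map<String, String> importGraph) {
        if (!markVisited(importFileName)) {
            return; // already processed: stop here instead of recursing forever
        }
        String next = importGraph.get(importFileName);
        if (next != null) {
            walk(next, importGraph);
        }
    }

    public static void main(String[] args) {
        Map<String, String> graph = new LinkedHashMap<>();
        graph.put("a.yaml", "b.yaml");
        graph.put("b.yaml", "a.yaml"); // deliberate cycle
        new ImportCycleGuardSketch().walk("a.yaml", graph);
        System.out.println("walk finished without a StackOverflowError");
    }
}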
- - LinkedHashMap customDefs = new LinkedHashMap(); + + LinkedHashMap customDefs = new LinkedHashMap(); ArrayList typeDefs = new ArrayList(); - if(typeDefinitions instanceof String[]) { - for(String s: (String[])typeDefinitions) { - typeDefs.add(s); - } - } - else { - typeDefs.add((String)typeDefinitions); + if (typeDefinitions instanceof String[]) { + for (String s : (String[]) typeDefinitions) { + typeDefs.add(s); + } + } else { + typeDefs.add((String) typeDefinitions); } - if(alImports == null) { + if (alImports == null) { alImports = _tplImports(); } - if(alImports != null) { - ImportsLoader customService = new ImportsLoader(alImports,path,typeDefs,tpl); - ArrayList> nestedToscaTpls = customService.getNestedToscaTpls(); - _updateNestedToscaTplsWithTopology(nestedToscaTpls); + if (alImports != null) { + ImportsLoader customService = new ImportsLoader(alImports, path, typeDefs, tpl); + ArrayList> nestedToscaTpls = customService.getNestedToscaTpls(); + _updateNestedToscaTplsWithTopology(nestedToscaTpls); - customDefs = customService.getCustomDefs(); - if(customDefs == null) { - return null; - } + customDefs = customService.getCustomDefs(); + if (customDefs == null) { + return null; + } } //Handle custom types defined in current template file - for(String td: typeDefs) { - if(!td.equals(IMPORTS)) { - LinkedHashMap innerCustomTypes = (LinkedHashMap )tpl.get(td); - if(innerCustomTypes != null) { - customDefs.putAll(innerCustomTypes); - } - } + for (String td : typeDefs) { + if (!td.equals(IMPORTS)) { + LinkedHashMap innerCustomTypes = (LinkedHashMap) tpl.get(td); + if (innerCustomTypes != null) { + customDefs.putAll(innerCustomTypes); + } + } } return customDefs; - } - - private void _updateNestedToscaTplsWithTopology(ArrayList> nestedToscaTpls) { - for(LinkedHashMap ntpl: nestedToscaTpls) { - // there is just one key:value pair in ntpl - for(Map.Entry me: ntpl.entrySet()) { - String fileName = me.getKey(); - @SuppressWarnings("unchecked") - LinkedHashMap toscaTpl = (LinkedHashMap)me.getValue(); - if(toscaTpl.get(TOPOLOGY_TEMPLATE) != null) { - if(nestedToscaTplsWithTopology.get(fileName) == null) { - nestedToscaTplsWithTopology.putAll(ntpl); - } - } - } - } - } - - // multi level nesting - RECURSIVE - @SuppressWarnings("unchecked") - private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { - if(++nestingLoopCounter > MAX_LEVELS) { - log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); - return; - } - // Reset Processed Imports for nested templates - this.processedImports = new HashSet<>(); - for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { - LinkedHashMap toscaTpl = - (LinkedHashMap)me.getValue(); - for(NodeTemplate nt: tt.getNodeTemplates()) { - if(_isSubMappedNode(nt,toscaTpl)) { - parsedParams = _getParamsForNestedTemplate(nt); - ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); - LinkedHashMap topologyTpl = - (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); - TopologyTemplate topologyWithSubMapping = - new TopologyTemplate(topologyTpl, - _getAllCustomDefs(alim), - relationshipTypes, - parsedParams, - nt, - resolveGetInput); - nt.setOriginComponentTemplate(topologyWithSubMapping); - if(topologyWithSubMapping.getSubstitutionMappings() != null) { + } + + private void _updateNestedToscaTplsWithTopology(ArrayList> nestedToscaTpls) { + for (LinkedHashMap ntpl : nestedToscaTpls) { + // there is just one key:value pair in ntpl + for (Map.Entry me : ntpl.entrySet()) { + String fileName = 
me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap toscaTpl = (LinkedHashMap) me.getValue(); + if (toscaTpl.get(TOPOLOGY_TEMPLATE) != null) { + if (nestedToscaTplsWithTopology.get(fileName) == null) { + nestedToscaTplsWithTopology.putAll(ntpl); + } + } + } + } + } + + // multi level nesting - RECURSIVE + @SuppressWarnings("unchecked") + private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { + if (++nestingLoopCounter > MAX_LEVELS) { + log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); + return; + } + // Reset Processed Imports for nested templates + this.processedImports = new HashSet<>(); + for (Map.Entry me : nestedToscaTplsWithTopology.entrySet()) { + LinkedHashMap toscaTpl = + (LinkedHashMap) me.getValue(); + for (NodeTemplate nt : tt.getNodeTemplates()) { + if (_isSubMappedNode(nt, toscaTpl)) { + parsedParams = _getParamsForNestedTemplate(nt); + ArrayList alim = (ArrayList) toscaTpl.get(IMPORTS); + LinkedHashMap topologyTpl = + (LinkedHashMap) toscaTpl.get(TOPOLOGY_TEMPLATE); + TopologyTemplate topologyWithSubMapping = + new TopologyTemplate(topologyTpl, + _getAllCustomDefs(alim), + relationshipTypes, + parsedParams, + nt, + resolveGetInput); + nt.setOriginComponentTemplate(topologyWithSubMapping); + if (topologyWithSubMapping.getSubstitutionMappings() != null) { // Record nested topology templates in top level template //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); // Set substitution mapping object for mapped node nt.setSubMappingToscaTemplate( - topologyWithSubMapping.getSubstitutionMappings()); + topologyWithSubMapping.getSubstitutionMappings()); _handleNestedToscaTemplatesWithTopology(topologyWithSubMapping); - } - } - } - } - } - + } + } + } + } + } + // private void _handleNestedToscaTemplatesWithTopology() { // for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { // String fname = me.getKey(); @@ -692,150 +685,145 @@ public class ToscaTemplate extends Object { // } // } - private void _validateField() { - String sVersion = _tplVersion(); - if(sVersion == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format( - "MissingRequiredField: Template is missing required field \"%s\"",DEFINITION_VERSION))); - } - else { - _validateVersion(sVersion); - this.version = sVersion; - } - - for (String sKey : tpl.keySet()) { - boolean bFound = false; - for (String sSection: SECTIONS) { - if(sKey.equals(sSection)) { - bFound = true; - break; - } - } - // check ADDITIONAL_SECTIONS - if(!bFound) { - if(ADDITIONAL_SECTIONS.get(version) != null && - ADDITIONAL_SECTIONS.get(version).contains(sKey)) { - bFound = true; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format( - "UnknownFieldError: Template contains unknown field \"%s\"", - sKey))); - } - } - } - - private void _validateVersion(String sVersion) { - boolean bFound = false; - for(String vtv: VALID_TEMPLATE_VERSIONS) { - if(sVersion.equals(vtv)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE247", String.format( - "InvalidTemplateVersion: \"%s\" is invalid. 
Valid versions are %s", - sVersion,VALID_TEMPLATE_VERSIONS.toString()))); - } - else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) { - EntityType.updateDefinitions(sVersion); - - } - } - - private String _getPath(String _path) throws JToscaException { - if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) { - return _path; - } - else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) { - // a CSAR archive - CSAR csar = new CSAR(_path, isFile); - if (csar.validate()) { - try { - csar.decompress(); - metaProperties = csar.getMetaProperties(); - } - catch (IOException e) { - log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path); - return null; - } - isFile = true; // the file has been decompressed locally - csar.cleanup(); - csarTempDir = csar.getTempDir(); - return csar.getTempDir() + File.separator + csar.getMainTemplate(); - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", "ValueError: " + _path + " is not a valid file")); - return null; - } - return null; - } - - private void verifyTemplate() throws JToscaException { - //Criticals - int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - if (validationIssuesCaught > 0) { - List validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport(); - log.trace("####################################################################################################"); - log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : "")); - for (String s : validationIssueStrings) { - log.trace("{}. 
CSAR name - {}", s, inputPath); - } - log.trace("####################################################################################################"); - } - - } - - public String getPath() { - return path; - } - - public String getVersion() { - return version; - } - - public String getDescription() { - return description; - } - - public TopologyTemplate getTopologyTemplate() { - return topologyTemplate; - } - - public Metadata getMetaData() { - return metaData; - } - - public ArrayList getInputs() { - if(inputs != null){ - inputs.stream().forEach(Input::resetAnnotaions); - } - return inputs; - } - - public ArrayList getOutputs() { - return outputs; - } - - public ArrayList getPolicies() { - return policies; - } - - public ArrayList getGroups() { - return groups; - } - - public ArrayList getNodeTemplates() { - return nodeTemplates; - } - - public LinkedHashMap getMetaProperties(String propertiesFile) { - return metaProperties.get(propertiesFile); - } - + private void _validateField() { + String sVersion = _tplVersion(); + if (sVersion == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format( + "MissingRequiredField: Template is missing required field \"%s\"", DEFINITION_VERSION))); + } else { + _validateVersion(sVersion); + this.version = sVersion; + } + + for (String sKey : tpl.keySet()) { + boolean bFound = false; + for (String sSection : SECTIONS) { + if (sKey.equals(sSection)) { + bFound = true; + break; + } + } + // check ADDITIONAL_SECTIONS + if (!bFound) { + if (ADDITIONAL_SECTIONS.get(version) != null && + ADDITIONAL_SECTIONS.get(version).contains(sKey)) { + bFound = true; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format( + "UnknownFieldError: Template contains unknown field \"%s\"", + sKey))); + } + } + } + + private void _validateVersion(String sVersion) { + boolean bFound = false; + for (String vtv : VALID_TEMPLATE_VERSIONS) { + if (sVersion.equals(vtv)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE247", String.format( + "InvalidTemplateVersion: \"%s\" is invalid. 
Valid versions are %s", + sVersion, VALID_TEMPLATE_VERSIONS.toString()))); + } else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) { + EntityType.updateDefinitions(sVersion); + + } + } + + private String _getPath(String _path) throws JToscaException { + if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) { + return _path; + } else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) { + // a CSAR archive + CSAR csar = new CSAR(_path, isFile); + if (csar.validate()) { + try { + csar.decompress(); + metaProperties = csar.getMetaProperties(); + } catch (IOException e) { + log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path); + return null; + } + isFile = true; // the file has been decompressed locally + csar.cleanup(); + csarTempDir = csar.getTempDir(); + return csar.getTempDir() + File.separator + csar.getMainTemplate(); + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", "ValueError: " + _path + " is not a valid file")); + return null; + } + return null; + } + + private void verifyTemplate() throws JToscaException { + //Criticals + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + if (validationIssuesCaught > 0) { + List validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport(); + log.trace("####################################################################################################"); + log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : "")); + for (String s : validationIssueStrings) { + log.trace("{}. 
CSAR name - {}", s, inputPath); + } + log.trace("####################################################################################################"); + } + + } + + public String getPath() { + return path; + } + + public String getVersion() { + return version; + } + + public String getDescription() { + return description; + } + + public TopologyTemplate getTopologyTemplate() { + return topologyTemplate; + } + + public Metadata getMetaData() { + return metaData; + } + + public ArrayList getInputs() { + if (inputs != null) { + inputs.stream().forEach(Input::resetAnnotaions); + } + return inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getGroups() { + return groups; + } + + public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public LinkedHashMap getMetaProperties(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + // private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap toscaTpl) { // // Return True if the nodetemple is substituted // if(nt != null && nt.getSubMappingToscaTemplate() == null && @@ -846,105 +834,105 @@ public class ToscaTemplate extends Object { // return false; // } - private boolean _isSubMappedNode(NodeTemplate nt, LinkedHashMap toscaTpl) { - // Return True if the nodetemple is substituted - if(nt != null && nt.getSubMappingToscaTemplate() == null && - getSubMappingNodeType(toscaTpl).equals(nt.getType()) && - nt.getInterfaces().size() < 1) { - return true; - } - return false; - } - - private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { - // Return total params for nested_template - LinkedHashMap pparams; - if(parsedParams != null) { - pparams = parsedParams; - } - else { - pparams = new LinkedHashMap(); - } - if(nt != null) { - for(String pname: nt.getProperties().keySet()) { - pparams.put(pname,nt.getPropertyValue(pname)); - } - } - return pparams; - } - - @SuppressWarnings("unchecked") - private String getSubMappingNodeType(LinkedHashMap toscaTpl) { - // Return substitution mappings node type - if(toscaTpl != null) { - return TopologyTemplate.getSubMappingNodeType( - (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE)); - } - return null; - } - - public boolean hasNestedTemplates() { + private boolean _isSubMappedNode(NodeTemplate nt, LinkedHashMap toscaTpl) { + // Return True if the nodetemple is substituted + if (nt != null && nt.getSubMappingToscaTemplate() == null && + getSubMappingNodeType(toscaTpl).equals(nt.getType()) && + nt.getInterfaces().size() < 1) { + return true; + } + return false; + } + + private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { + // Return total params for nested_template + LinkedHashMap pparams; + if (parsedParams != null) { + pparams = parsedParams; + } else { + pparams = new LinkedHashMap(); + } + if (nt != null) { + for (String pname : nt.getProperties().keySet()) { + pparams.put(pname, nt.getPropertyValue(pname)); + } + } + return pparams; + } + + @SuppressWarnings("unchecked") + private String getSubMappingNodeType(LinkedHashMap toscaTpl) { + // Return substitution mappings node type + if (toscaTpl != null) { + return TopologyTemplate.getSubMappingNodeType( + (LinkedHashMap) toscaTpl.get(TOPOLOGY_TEMPLATE)); + } + return null; + } + + public boolean hasNestedTemplates() { // Return True if the tosca template has nested templates return nestedToscaTemplatesWithTopology != null && - nestedToscaTemplatesWithTopology.size() >= 1; - - } - - public ArrayList 
getNestedTemplates() { - return nestedToscaTemplatesWithTopology; - } - - public ConcurrentHashMap getNestedTopologyTemplates() { - return nestedToscaTplsWithTopology; - } - - /** - * Get datatypes. - * @return return list of datatypes. - */ - public HashSet getDataTypes() { - return dataTypes; - } - - @Override - public String toString() { - return "ToscaTemplate{" + - "exttools=" + exttools + - ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS + - ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS + - ", isFile=" + isFile + - ", path='" + path + '\'' + - ", inputPath='" + inputPath + '\'' + - ", parsedParams=" + parsedParams + - ", tpl=" + tpl + - ", version='" + version + '\'' + - ", imports=" + imports + - ", relationshipTypes=" + relationshipTypes + - ", metaData=" + metaData + - ", description='" + description + '\'' + - ", topologyTemplate=" + topologyTemplate + - ", repositories=" + repositories + - ", inputs=" + inputs + - ", relationshipTemplates=" + relationshipTemplates + - ", nodeTemplates=" + nodeTemplates + - ", outputs=" + outputs + - ", policies=" + policies + - ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology + - ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology + - ", graph=" + graph + - ", csarTempDir='" + csarTempDir + '\'' + - ", nestingLoopCounter=" + nestingLoopCounter + - ", dataTypes=" + dataTypes + - '}'; - } - - public List getInputs(boolean annotationsRequired) { - if(inputs != null && annotationsRequired){ - inputs.stream().forEach(Input::parseAnnotations); - return inputs; - } - return getInputs(); - } + nestedToscaTemplatesWithTopology.size() >= 1; + + } + + public ArrayList getNestedTemplates() { + return nestedToscaTemplatesWithTopology; + } + + public ConcurrentHashMap getNestedTopologyTemplates() { + return nestedToscaTplsWithTopology; + } + + /** + * Get datatypes. + * + * @return return list of datatypes. 
+ */ + public HashSet getDataTypes() { + return dataTypes; + } + + @Override + public String toString() { + return "ToscaTemplate{" + + "exttools=" + exttools + + ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS + + ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS + + ", isFile=" + isFile + + ", path='" + path + '\'' + + ", inputPath='" + inputPath + '\'' + + ", parsedParams=" + parsedParams + + ", tpl=" + tpl + + ", version='" + version + '\'' + + ", imports=" + imports + + ", relationshipTypes=" + relationshipTypes + + ", metaData=" + metaData + + ", description='" + description + '\'' + + ", topologyTemplate=" + topologyTemplate + + ", repositories=" + repositories + + ", inputs=" + inputs + + ", relationshipTemplates=" + relationshipTemplates + + ", nodeTemplates=" + nodeTemplates + + ", outputs=" + outputs + + ", policies=" + policies + + ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology + + ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology + + ", graph=" + graph + + ", csarTempDir='" + csarTempDir + '\'' + + ", nestingLoopCounter=" + nestingLoopCounter + + ", dataTypes=" + dataTypes + + '}'; + } + + public List getInputs(boolean annotationsRequired) { + if (inputs != null && annotationsRequired) { + inputs.stream().forEach(Input::parseAnnotations); + return inputs; + } + return getInputs(); + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java b/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java index 91545c2..c78978f 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,121 +28,119 @@ import java.util.LinkedHashMap; public class Triggers extends EntityTemplate { - private static final String DESCRIPTION = "description"; - private static final String EVENT = "event_type"; - private static final String SCHEDULE = "schedule"; - private static final String TARGET_FILTER = "target_filter"; - private static final String CONDITION = "condition"; - private static final String ACTION = "action"; - - private static final String SECTIONS[] = { - DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION - }; - - private static final String METER_NAME = "meter_name"; - private static final String CONSTRAINT = "constraint"; - private static final String PERIOD = "period"; - private static final String EVALUATIONS = "evaluations"; - private static final String METHOD = "method"; - private static final String THRESHOLD = "threshold"; - private static final String COMPARISON_OPERATOR = "comparison_operator"; - - private static final String CONDITION_KEYNAMES[] = { - METER_NAME, CONSTRAINT, PERIOD, EVALUATIONS, METHOD, THRESHOLD, COMPARISON_OPERATOR - }; - - private String name; - private LinkedHashMap triggerTpl; - - public Triggers(String _name,LinkedHashMap _triggerTpl) { - super(); // dummy. 
don't want super - name = _name; - triggerTpl = _triggerTpl; - _validateKeys(); - _validateCondition(); - _validateInput(); - } - - public String getDescription() { - return (String)triggerTpl.get("description"); - } - - public String getEvent() { - return (String)triggerTpl.get("event_type"); - } - - public LinkedHashMap getSchedule() { - return (LinkedHashMap)triggerTpl.get("schedule"); - } - - public LinkedHashMap getTargetFilter() { - return (LinkedHashMap)triggerTpl.get("target_filter"); - } - - public LinkedHashMap getCondition() { - return (LinkedHashMap)triggerTpl.get("condition"); - } - - public LinkedHashMap getAction() { - return (LinkedHashMap)triggerTpl.get("action"); - } - - private void _validateKeys() { - for(String key: triggerTpl.keySet()) { - boolean bFound = false; - for(int i=0; i triggerTpl; + + public Triggers(String name, LinkedHashMap triggerTpl) { + super(); // dummy. don't want super + this.name = name; + this.triggerTpl = triggerTpl; + validateKeys(); + validateCondition(); + validateInput(); + } + + public String getDescription() { + return (String) triggerTpl.get("description"); + } + + public String getEvent() { + return (String) triggerTpl.get("event_type"); + } + + public LinkedHashMap getSchedule() { + return (LinkedHashMap) triggerTpl.get("schedule"); + } + + public LinkedHashMap getTargetFilter() { + return (LinkedHashMap) triggerTpl.get("target_filter"); + } + + public LinkedHashMap getCondition() { + return (LinkedHashMap) triggerTpl.get("condition"); + } + + public LinkedHashMap getAction() { + return (LinkedHashMap) triggerTpl.get("action"); + } + + private void validateKeys() { + for (String key : triggerTpl.keySet()) { + boolean bFound = false; + for (int i = 0; i < SECTIONS.length; i++) { + if (key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE249", String.format( + "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + private void validateCondition() { + for (String key : getCondition().keySet()) { + boolean bFound = false; + for (int i = 0; i < CONDITION_KEYNAMES.length; i++) { + if (key.equals(CONDITION_KEYNAMES[i])) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE250", String.format( + "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + private void validateInput() { + for (String key : getCondition().keySet()) { + Object value = getCondition().get(key); + if (key.equals(PERIOD) || key.equals(EVALUATIONS)) { + ValidateUtils.validateInteger(value); + } else if (key.equals(THRESHOLD)) { + ValidateUtils.validateNumeric(value); + } else if (key.equals(METER_NAME) || key.equals(METHOD)) { + ValidateUtils.validateString(value); + } + } + } + + @Override + public String toString() { + return "Triggers{" + + "name='" + name + '\'' + + ", triggerTpl=" + triggerTpl + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java b/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java index b7adfa4..f2bb650 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -38,20 +38,23 @@ public class UnsupportedType { of un_supported_types. As tosca-parser move to provide support for version 1.1 and higher, they will be removed. */ - - private static final String unsupportedTypes[] = { - "tosca.test.invalidtype", - "tosca.nodes.Storage.ObjectStorage", - "tosca.nodes.Storage.BlockStorage"}; + + private UnsupportedType() { + } + + private static final String[] UNSUPPORTED_TYPES = { + "tosca.test.invalidtype", + "tosca.nodes.Storage.ObjectStorage", + "tosca.nodes.Storage.BlockStorage"}; public static boolean validateType(String entityType) { - for(String ust: unsupportedTypes) { - if(ust.equals(entityType)) { + for (String ust : UNSUPPORTED_TYPES) { + if (ust.equals(entityType)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE251", String.format( - "UnsupportedTypeError: Entity type \"%s\" is not supported",entityType))); - return true; - } - } + "UnsupportedTypeError: Entity type \"%s\" is not supported", entityType))); + return true; + } + } return false; } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java index b96399b..56416c6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
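// Editorial note, not part of the patch: Triggers.validateKeys and UnsupportedType.validateType
// above share one pattern: compare a key or type name against a fixed array and report a
// JToscaValidationIssue code when it does not match. A simplified standalone sketch of that
// pattern; the collector is replaced by a plain list of strings so the example runs on its own:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class UnknownFieldCheckSketch {

    private static final String[] SECTIONS = {
            "description", "event_type", "schedule", "target_filter", "condition", "action"
    };

    // Collects "[code]: message" strings instead of JToscaValidationIssue objects.
    static List<String> checkKeys(String triggerName, Map<String, Object> triggerTpl) {
        List<String> issues = new ArrayList<>();
        for (String key : triggerTpl.keySet()) {
            if (!Arrays.asList(SECTIONS).contains(key)) {
                issues.add(String.format(
                        "[JE249]: UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"",
                        triggerName, key));
            }
        }
        return issues;
    }

    public static void main(String[] args) {
        Map<String, Object> tpl = new LinkedHashMap<>();
        tpl.put("event_type", "scale_up");
        tpl.put("unexpected_field", 42);
        System.out.println(checkKeys("my_trigger", tpl)); // reports only unexpected_field
    }
}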
@@ -22,26 +22,26 @@ package org.onap.sdc.toscaparser.api.common; public class JToscaException extends Exception { - private static final long serialVersionUID = 1L; - private String code; + private static final long serialVersionUID = 1L; + private String code; - public JToscaException(String message, String code) { - super(message); - this.code = code; - } + public JToscaException(String message, String code) { + super(message); + this.code = code; + } - public String getCode() { - return code; - } + public String getCode() { + return code; + } - public void setCode(String code) { - this.code = code; - } + public void setCode(String code) { + this.code = code; + } - //JE1001 - Meta file missing - //JE1002 - Invalid yaml content - //JE1003 - Entry-Definition not defined in meta file - //JE1004 - Entry-Definition file missing - //JE1005 - General Error - //JE1006 - General Error/Path not valid + //JE1001 - Meta file missing + //JE1002 - Invalid yaml content + //JE1003 - Entry-Definition not defined in meta file + //JE1004 - Entry-Definition file missing + //JE1005 - General Error + //JE1006 - General Error/Path not valid } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java index 9eb8f54..19c9583 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
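// Editorial note, not part of the patch: JToscaException above carries a string error code
// next to the message (the JE1001..JE1006 values listed in the class comment). A usage
// sketch, assuming the JToscaErrorCodes enum used elsewhere in this patch exposes the code
// string via getValue(); the load() method and its path argument are illustrative only:

import org.onap.sdc.toscaparser.api.common.JToscaException;
import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes;

public class JToscaExceptionUsageSketch {

    // In the parser itself, _abort() raises this after printing the collected issues.
    static void load(String path) throws JToscaException {
        throw new JToscaException("jtosca aborting: " + path,
                JToscaErrorCodes.PATH_NOT_VALID.getValue());
    }

    public static void main(String[] args) {
        try {
            load("/no/such/template.csar");
        } catch (JToscaException e) {
            // The string code lets callers branch on the failure category.
            System.out.println(e.getCode() + ": " + e.getMessage());
        }
    }
}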
@@ -22,34 +22,34 @@ package org.onap.sdc.toscaparser.api.common; public class JToscaValidationIssue { - private String code; - private String message; + private String code; + private String message; - public JToscaValidationIssue(String code, String message) { - super(); - this.code = code; - this.message = message; - } + public JToscaValidationIssue(String code, String message) { + super(); + this.code = code; + this.message = message; + } - public String getMessage() { - return message; - } + public String getMessage() { + return message; + } - public void setMessage(String message) { - this.message = message; - } + public void setMessage(String message) { + this.message = message; + } - public String getCode() { - return code; - } + public String getCode() { + return code; + } - public void setCode(String code) { - this.code = code; - } - - @Override - public String toString() { - return "JToscaError [code=" + code + ", message=" + message + "]"; - } + public void setCode(String code) { + this.code = code; + } + + @Override + public String toString() { + return "JToscaError [code=" + code + ", message=" + message + "]"; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java b/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java index 2769c1a..c109ffd 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,37 +23,36 @@ package org.onap.sdc.toscaparser.api.common; import java.util.IllegalFormatException; public class TOSCAException extends Exception { - private String message = "An unkown exception has occurred"; - private static boolean FATAL_EXCEPTION_FORMAT_ERRORS = false; - private String msgFmt = null; - - public TOSCAException(String...strings) { - try { - message = String.format(msgFmt,(Object[])strings); - } - catch (IllegalFormatException e) { - // TODO log - - if(FATAL_EXCEPTION_FORMAT_ERRORS) { - throw e; - } - - } - - } - - public String __str__() { - return message; - } - - public static void generate_inv_schema_property_error(String name, String attr, String value, String valid_values) { - //TODO - - } - - public static void setFatalFormatException(boolean flag) { - FATAL_EXCEPTION_FORMAT_ERRORS = flag; - } - + private String message = "An unkown exception has occurred"; + private static boolean FATAL_EXCEPTION_FORMAT_ERRORS = false; + private String msgFmt = null; + + public TOSCAException(String... 
strings) { + try { + message = String.format(msgFmt, (Object[]) strings); + } catch (IllegalFormatException e) { + // TODO log + + if (FATAL_EXCEPTION_FORMAT_ERRORS) { + throw e; + } + + } + + } + + public String __str__() { + return message; + } + + public static void generate_inv_schema_property_error(String name, String attr, String value, String valid_values) { + //TODO + + } + + public static void setFatalFormatException(boolean flag) { + FATAL_EXCEPTION_FORMAT_ERRORS = flag; + } + } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java b/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java index 25bb854..71c0401 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,10 +26,11 @@ import java.util.*; public class ValidationIssueCollector { - private Map validationIssues = new HashMap(); + private Map validationIssues = new HashMap(); + public void appendValidationIssue(JToscaValidationIssue issue) { - validationIssues.put(issue.getMessage(),issue); + validationIssues.put(issue.getMessage(), issue); } @@ -37,13 +38,14 @@ public class ValidationIssueCollector { List report = new ArrayList<>(); if (!validationIssues.isEmpty()) { for (JToscaValidationIssue exception : validationIssues.values()) { - report.add("["+exception.getCode()+"]: "+ exception.getMessage()); + report.add("[" + exception.getCode() + "]: " + exception.getMessage()); } } return report; } - public Map getValidationIssues() { + + public Map getValidationIssues() { return validationIssues; } @@ -51,5 +53,5 @@ public class ValidationIssueCollector { public int validationIssuesCaught() { return validationIssues.size(); } - + } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java index 3dce5e6..9cf8c6c 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
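// Editorial note, not part of the patch: ValidationIssueCollector above keys issues by
// message, so a repeated message is stored only once, and getValidationIssueReport()
// renders each entry as "[code]: message". A usage sketch built only from the APIs shown
// in this patch:

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector;

public class ValidationReportSketch {

    public static void main(String[] args) {
        ValidationIssueCollector collector = new ValidationIssueCollector();
        collector.appendValidationIssue(new JToscaValidationIssue("JE246",
                "UnknownFieldError: Template contains unknown field \"foo\""));
        collector.appendValidationIssue(new JToscaValidationIssue("JE246",
                "UnknownFieldError: Template contains unknown field \"foo\"")); // same message, stored once

        System.out.println(collector.validationIssuesCaught()); // 1
        for (String line : collector.getValidationIssueReport()) {
            System.out.println(line); // [JE246]: UnknownFieldError: ...
        }
    }
}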
@@ -23,67 +23,63 @@ package org.onap.sdc.toscaparser.api.elements; import java.util.LinkedHashMap; public class ArtifactTypeDef extends StatefulEntityType { - - private String type; - private LinkedHashMap customDef; - private LinkedHashMap properties; - private LinkedHashMap parentArtifacts; - - - - public ArtifactTypeDef(String atype,LinkedHashMap _customDef) { - super(atype,ARTIFACT_PREFIX,_customDef); - - type = atype; - customDef = _customDef; - properties = null; - if(defs != null) { - properties = (LinkedHashMap)defs.get(PROPERTIES); - } - parentArtifacts = _getParentArtifacts(); - } - - private LinkedHashMap _getParentArtifacts() { - LinkedHashMap artifacts = new LinkedHashMap<>(); - String parentArtif = null; - if(getParentType() != null) { - parentArtif = getParentType().getType(); - } - if(parentArtif != null && !parentArtif.isEmpty()) { - while(!parentArtif.equals("tosca.artifacts.Root")) { - Object ob = TOSCA_DEF.get(parentArtif); - artifacts.put(parentArtif,ob); - parentArtif = - (String)((LinkedHashMap)ob).get("derived_from"); + + private String type; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap parentArtifacts; + + + public ArtifactTypeDef(String type, LinkedHashMap customDef) { + super(type, ARTIFACT_PREFIX, customDef); + + this.type = type; + this.customDef = customDef; + properties = defs != null ? (LinkedHashMap) defs.get(PROPERTIES) : null; + parentArtifacts = getParentArtifacts(); + } + + private LinkedHashMap getParentArtifacts() { + LinkedHashMap artifacts = new LinkedHashMap<>(); + String parentArtif = null; + if (getParentType() != null) { + parentArtif = getParentType().getType(); + } + if (parentArtif != null && !parentArtif.isEmpty()) { + while (!parentArtif.equals("tosca.artifacts.Root")) { + Object ob = TOSCA_DEF.get(parentArtif); + artifacts.put(parentArtif, ob); + parentArtif = + (String) ((LinkedHashMap) ob).get("derived_from"); } - } - return artifacts; - } - - public ArtifactTypeDef getParentType() { + } + return artifacts; + } + + public ArtifactTypeDef getParentType() { // Return a artifact entity from which this entity is derived - if(defs == null) { - return null; + if (defs == null) { + return null; } String partifactEntity = derivedFrom(defs); - if(partifactEntity != null) { - return new ArtifactTypeDef(partifactEntity,customDef); + if (partifactEntity != null) { + return new ArtifactTypeDef(partifactEntity, customDef); } return null; - } - - public Object getArtifact(String name) { + } + + public Object getArtifact(String name) { // Return the definition of an artifact field by name - if(defs != null) { + if (defs != null) { return defs.get(name); } return null; - } - - public String getType() { - return type; - } - + } + + public String getType() { + return type; + } + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java index 2070c50..e4a30f1 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,27 +24,27 @@ import java.util.LinkedHashMap; public class AttributeDef { // TOSCA built-in Attribute type - - private String name; - private Object value; - private LinkedHashMap schema; - public AttributeDef(String adName, Object adValue, LinkedHashMap adSchema) { + private String name; + private Object value; + private LinkedHashMap schema; + + public AttributeDef(String adName, Object adValue, LinkedHashMap adSchema) { name = adName; value = adValue; schema = adSchema; } - + public String getName() { - return name; + return name; } public Object getValue() { - return value; + return value; } - public LinkedHashMap getSchema() { - return schema; + public LinkedHashMap getSchema() { + return schema; } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java index 9f9610e..e3c24b3 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
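// Editorial note, not part of the patch: ArtifactTypeDef.getParentArtifacts above and
// CapabilityTypeDef.getParentCapabilities below both walk a type's "derived_from" chain
// until the root type is reached. A standalone sketch of that walk over a plain map of
// type definitions; the type names and the collectAncestors helper are illustrative:

import java.util.LinkedHashMap;
import java.util.Map;

public class DerivedFromWalkSketch {

    // Collects every ancestor definition of typeName up to (but excluding) rootType.
    static Map<String, Object> collectAncestors(String typeName, String rootType,
                                                Map<String, Map<String, Object>> typeDefs) {
        Map<String, Object> ancestors = new LinkedHashMap<>();
        String current = (String) typeDefs.get(typeName).get("derived_from");
        while (current != null && !current.equals(rootType)) {
            Map<String, Object> def = typeDefs.get(current);
            ancestors.put(current, def);
            current = (String) def.get("derived_from");
        }
        return ancestors;
    }

    public static void main(String[] args) {
        Map<String, Map<String, Object>> defs = new LinkedHashMap<>();
        defs.put("tosca.capabilities.Root", new LinkedHashMap<String, Object>());
        Map<String, Object> node = new LinkedHashMap<>();
        node.put("derived_from", "tosca.capabilities.Root");
        defs.put("tosca.capabilities.Node", node);
        Map<String, Object> custom = new LinkedHashMap<>();
        custom.put("derived_from", "tosca.capabilities.Node");
        defs.put("my.custom.Capability", custom);
        // Prints {tosca.capabilities.Node={derived_from=tosca.capabilities.Root}}
        System.out.println(collectAncestors("my.custom.Capability", "tosca.capabilities.Root", defs));
    }
}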
@@ -25,134 +25,132 @@ import java.util.LinkedHashMap; import java.util.Map; public class CapabilityTypeDef extends StatefulEntityType { - // TOSCA built-in capabilities type + // TOSCA built-in capabilities type - private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root"; + private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root"; - private String name; - private String nodetype; - private LinkedHashMap customDef; - private LinkedHashMap properties; - private LinkedHashMap parentCapabilities; + private String name; + private String nodetype; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap parentCapabilities; - @SuppressWarnings("unchecked") - public CapabilityTypeDef(String cname,String ctype,String ntype,LinkedHashMap ccustomDef) { - super(ctype,CAPABILITY_PREFIX,ccustomDef); - - name = cname; + @SuppressWarnings("unchecked") + public CapabilityTypeDef(String cname, String ctype, String ntype, LinkedHashMap ccustomDef) { + super(ctype, CAPABILITY_PREFIX, ccustomDef); + + name = cname; nodetype = ntype; properties = null; customDef = ccustomDef; - if(defs != null) { - properties = (LinkedHashMap)defs.get(PROPERTIES); + if (defs != null) { + properties = (LinkedHashMap) defs.get(PROPERTIES); } - parentCapabilities = _getParentCapabilities(customDef); - } - - @SuppressWarnings("unchecked") - public ArrayList getPropertiesDefObjects () { + parentCapabilities = getParentCapabilities(customDef); + } + + @SuppressWarnings("unchecked") + public ArrayList getPropertiesDefObjects() { // Return a list of property definition objects - ArrayList propsdefs = new ArrayList<>(); - LinkedHashMap parentProperties = new LinkedHashMap<>(); - if(parentCapabilities != null) { - for(Map.Entry me: parentCapabilities.entrySet()) { - parentProperties.put(me.getKey(),((LinkedHashMap)me.getValue()).get("properties")); - } - } - if(properties != null) { - for(Map.Entry me: properties.entrySet()) { - propsdefs.add(new PropertyDef(me.getKey(),null,(LinkedHashMap)me.getValue())); - } - } - if(parentProperties != null) { - for(Map.Entry me: parentProperties.entrySet()) { - LinkedHashMap props = (LinkedHashMap)me.getValue(); - if (props != null) { - for(Map.Entry pe: props.entrySet()) { - String prop = pe.getKey(); - LinkedHashMap schema = (LinkedHashMap)pe.getValue(); - // add parent property if not overridden by children type - if(properties == null || properties.get(prop) == null) { - propsdefs.add(new PropertyDef(prop, null, schema)); - } - } - } - } - } - return propsdefs; - } - - public LinkedHashMap getPropertiesDef() { - LinkedHashMap pds = new LinkedHashMap<>(); - for(PropertyDef pd: getPropertiesDefObjects()) { - pds.put(pd.getName(),pd); - } - return pds; - } - - public PropertyDef getPropertyDefValue(String pdname) { + ArrayList propsdefs = new ArrayList<>(); + LinkedHashMap parentProperties = new LinkedHashMap<>(); + if (parentCapabilities != null) { + for (Map.Entry me : parentCapabilities.entrySet()) { + parentProperties.put(me.getKey(), ((LinkedHashMap) me.getValue()).get("properties")); + } + } + if (properties != null) { + for (Map.Entry me : properties.entrySet()) { + propsdefs.add(new PropertyDef(me.getKey(), null, (LinkedHashMap) me.getValue())); + } + } + if (parentProperties != null) { + for (Map.Entry me : parentProperties.entrySet()) { + LinkedHashMap props = (LinkedHashMap) me.getValue(); + if (props != null) { + for (Map.Entry pe : props.entrySet()) { + String prop = pe.getKey(); + 
LinkedHashMap schema = (LinkedHashMap) pe.getValue(); + // add parent property if not overridden by children type + if (properties == null || properties.get(prop) == null) { + propsdefs.add(new PropertyDef(prop, null, schema)); + } + } + } + } + } + return propsdefs; + } + + public LinkedHashMap getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap<>(); + for (PropertyDef pd : getPropertiesDefObjects()) { + pds.put(pd.getName(), pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String pdname) { // Return the definition of a given property name - LinkedHashMap propsDef = getPropertiesDef(); - if(propsDef != null && propsDef.get(pdname) != null) { - return (PropertyDef)propsDef.get(pdname).getPDValue(); - } - return null; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getParentCapabilities(LinkedHashMap customDef) { - LinkedHashMap capabilities = new LinkedHashMap<>(); - CapabilityTypeDef parentCap = getParentType(); - if(parentCap != null) { - String sParentCap = parentCap.getType(); - while(!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) { - if(TOSCA_DEF.get(sParentCap) != null) { - capabilities.put(sParentCap,TOSCA_DEF.get(sParentCap)); - } - else if(customDef != null && customDef.get(sParentCap) != null) { - capabilities.put(sParentCap,customDef.get(sParentCap)); - } - sParentCap = (String)((LinkedHashMap)capabilities.get(sParentCap)).get("derived_from"); - } - } - return capabilities; - } - - public CapabilityTypeDef getParentType() { + LinkedHashMap propsDef = getPropertiesDef(); + if (propsDef != null && propsDef.get(pdname) != null) { + return (PropertyDef) propsDef.get(pdname).getPDValue(); + } + return null; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap getParentCapabilities(LinkedHashMap customDef) { + LinkedHashMap capabilities = new LinkedHashMap<>(); + CapabilityTypeDef parentCap = getParentType(); + if (parentCap != null) { + String sParentCap = parentCap.getType(); + while (!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) { + if (TOSCA_DEF.get(sParentCap) != null) { + capabilities.put(sParentCap, TOSCA_DEF.get(sParentCap)); + } else if (customDef != null && customDef.get(sParentCap) != null) { + capabilities.put(sParentCap, customDef.get(sParentCap)); + } + sParentCap = (String) ((LinkedHashMap) capabilities.get(sParentCap)).get("derived_from"); + } + } + return capabilities; + } + + public CapabilityTypeDef getParentType() { // Return a capability this capability is derived from - if(defs == null) { - return null; - } - String pnode = derivedFrom(defs); - if(pnode != null && !pnode.isEmpty()) { + if (defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if (pnode != null && !pnode.isEmpty()) { return new CapabilityTypeDef(name, pnode, nodetype, customDef); - } - return null; - } + } + return null; + } - public boolean inheritsFrom(ArrayList typeNames) { + public boolean inheritsFrom(ArrayList typeNames) { // Check this capability is in type_names // Check if this capability or some of its parent types // are in the list of types: type_names - if(typeNames.contains(getType())) { - return true; - } - else if(getParentType() != null) { - return getParentType().inheritsFrom(typeNames); - } - return false; - } - - // getters/setters - - public LinkedHashMap getProperties() { - return properties; - } - - public String getName() { - return name; - } + if (typeNames.contains(getType())) { + return true; + } else if (getParentType() != null) { + return getParentType().inheritsFrom(typeNames); + } + 
return false; + } + + // getters/setters + + public LinkedHashMap getProperties() { + return properties; + } + + public String getName() { + return name; + } } /*python @@ -227,7 +225,7 @@ class CapabilityTypeDef(StatefulEntityType): if pnode: return CapabilityTypeDef(self.name, pnode, self.nodetype, self.custom_def) - + def inherits_from(self, type_names): '''Check this capability is in type_names diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java index 4b6451d..d8cf460 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,66 +24,66 @@ import java.util.ArrayList; import java.util.LinkedHashMap; public class DataType extends StatefulEntityType { - - LinkedHashMap customDef; - - public DataType(String _dataTypeName,LinkedHashMap _customDef) { - super(_dataTypeName,DATATYPE_NETWORK_PREFIX,_customDef); - - customDef = _customDef; - } - - public DataType getParentType() { + + LinkedHashMap customDef; + + public DataType(String _dataTypeName, LinkedHashMap _customDef) { + super(_dataTypeName, DATATYPE_NETWORK_PREFIX, _customDef); + + customDef = _customDef; + } + + public DataType getParentType() { // Return a datatype this datatype is derived from - if(defs != null) { - String ptype = derivedFrom(defs); - if(ptype != null) { - return new DataType(ptype,customDef); - } - } + if (defs != null) { + String ptype = derivedFrom(defs); + if (ptype != null) { + return new DataType(ptype, customDef); + } + } return null; - } + } - public String getValueType() { + public String getValueType() { // Return 'type' section in the datatype schema - if(defs != null) { - return (String)entityValue(defs,"type"); - } - return null; - } + if (defs != null) { + return (String) entityValue(defs, "type"); + } + return null; + } - public ArrayList getAllPropertiesObjects() { + public ArrayList getAllPropertiesObjects() { //Return all properties objects defined in type and parent type - ArrayList propsDef = getPropertiesDefObjects(); + ArrayList propsDef = getPropertiesDefObjects(); DataType ptype = getParentType(); - while(ptype != null) { + while (ptype != null) { propsDef.addAll(ptype.getPropertiesDefObjects()); ptype = ptype.getParentType(); } return propsDef; - } - - public LinkedHashMap getAllProperties() { + } + + public LinkedHashMap getAllProperties() { // Return a dictionary of all property definition name-object pairs - LinkedHashMap pno = new LinkedHashMap<>(); - for(PropertyDef pd: getAllPropertiesObjects()) { - pno.put(pd.getName(),pd); - } + LinkedHashMap pno = new LinkedHashMap<>(); + for (PropertyDef pd : getAllPropertiesObjects()) { + pno.put(pd.getName(), pd); + } return pno; - } + } - public Object getAllPropertyValue(String name) { + public Object getAllPropertyValue(String name) { // Return the value of a given property name - LinkedHashMap propsDef = getAllProperties(); - if(propsDef != null && propsDef.get(name) != null) { + LinkedHashMap 
propsDef = getAllProperties(); + if (propsDef != null && propsDef.get(name) != null) { return propsDef.get(name).getPDValue(); } return null; - } - - public LinkedHashMap getDefs() { - return defs; - } + } + + public LinkedHashMap getDefs() { + return defs; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java index 62f51d2..efc6ac9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -36,189 +36,189 @@ import org.yaml.snakeyaml.Yaml; public class EntityType { - private static Logger log = LoggerFactory.getLogger(EntityType.class.getName()); - - private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml"; - protected static final String DERIVED_FROM = "derived_from"; - protected static final String PROPERTIES = "properties"; - protected static final String ATTRIBUTES = "attributes"; - protected static final String REQUIREMENTS = "requirements"; - protected static final String INTERFACES = "interfaces"; - protected static final String CAPABILITIES = "capabilities"; - protected static final String TYPE = "type"; - protected static final String ARTIFACTS = "artifacts"; - - @SuppressWarnings("unused") - private static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, + private static Logger log = LoggerFactory.getLogger(EntityType.class.getName()); + + private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml"; + protected static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + protected static final String TYPE = "type"; + protected static final String ARTIFACTS = "artifacts"; + + @SuppressWarnings("unused") + private static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE, ARTIFACTS - }; + }; - public static final String TOSCA_DEF_SECTIONS[] = { - "node_types", "data_types", "artifact_types", - "group_types", "relationship_types", - "capability_types", "interface_types", - "policy_types"}; + public static final String TOSCA_DEF_SECTIONS[] = { + "node_types", "data_types", "artifact_types", + "group_types", "relationship_types", + "capability_types", "interface_types", + "policy_types"}; // TOSCA definition file - //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath(); - - //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile(); + //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath(); + + //private final static 
String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile(); //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); - - private static LinkedHashMap TOSCA_DEF_LOAD_AS_IS = loadTdf(); - - //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); - - @SuppressWarnings("unchecked") - private static LinkedHashMap loadTdf() { - String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile(); - InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML); - if (input == null){ - log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation); - } - Yaml yaml = new Yaml(); - Object loaded = yaml.load(input); - //@SuppressWarnings("unchecked") - return (LinkedHashMap) loaded; - } + + private static LinkedHashMap TOSCA_DEF_LOAD_AS_IS = loadTdf(); + + //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); + + @SuppressWarnings("unchecked") + private static LinkedHashMap loadTdf() { + String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile(); + InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML); + if (input == null) { + log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation); + } + Yaml yaml = new Yaml(); + Object loaded = yaml.load(input); + //@SuppressWarnings("unchecked") + return (LinkedHashMap) loaded; + } // Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS - public static LinkedHashMap TOSCA_DEF; - static { - TOSCA_DEF = new LinkedHashMap(); - for(String section: TOSCA_DEF_SECTIONS) { - @SuppressWarnings("unchecked") - LinkedHashMap value = (LinkedHashMap)TOSCA_DEF_LOAD_AS_IS.get(section); - if(value != null) { - for(String key: value.keySet()) { - TOSCA_DEF.put(key, value.get(key)); - } - } + public static LinkedHashMap TOSCA_DEF; + + static { + TOSCA_DEF = new LinkedHashMap(); + for (String section : TOSCA_DEF_SECTIONS) { + @SuppressWarnings("unchecked") + LinkedHashMap value = (LinkedHashMap) TOSCA_DEF_LOAD_AS_IS.get(section); + if (value != null) { + for (String key : value.keySet()) { + TOSCA_DEF.put(key, value.get(key)); + } + } } - } - - public static final String DEPENDSON = "tosca.relationships.DependsOn"; - public static final String HOSTEDON = "tosca.relationships.HostedOn"; - public static final String CONNECTSTO = "tosca.relationships.ConnectsTo"; - public static final String ATTACHESTO = "tosca.relationships.AttachesTo"; - public static final String LINKSTO = "tosca.relationships.network.LinksTo"; - public static final String BINDSTO = "tosca.relationships.network.BindsTo"; - - public static final String RELATIONSHIP_TYPE[] = { - "tosca.relationships.DependsOn", - "tosca.relationships.HostedOn", - "tosca.relationships.ConnectsTo", - "tosca.relationships.AttachesTo", - "tosca.relationships.network.LinksTo", - "tosca.relationships.network.BindsTo"}; - - public static final String NODE_PREFIX = "tosca.nodes."; - public static final String RELATIONSHIP_PREFIX = "tosca.relationships."; - public static final String CAPABILITY_PREFIX = "tosca.capabilities."; - public static final String INTERFACE_PREFIX = "tosca.interfaces."; - public static final String ARTIFACT_PREFIX = "tosca.artifacts."; - public static final String POLICY_PREFIX = "tosca.policies."; - public 
static final String GROUP_PREFIX = "tosca.groups."; - //currently the data types are defined only for network - // but may have changes in the future. - public static final String DATATYPE_PREFIX = "tosca.datatypes."; - public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network."; - public static final String TOSCA = "tosca"; - - protected String type; - protected LinkedHashMap defs = null; - public Object getParentType() { return null; } - - public String derivedFrom(LinkedHashMap defs) { + } + + public static final String DEPENDSON = "tosca.relationships.DependsOn"; + public static final String HOSTEDON = "tosca.relationships.HostedOn"; + public static final String CONNECTSTO = "tosca.relationships.ConnectsTo"; + public static final String ATTACHESTO = "tosca.relationships.AttachesTo"; + public static final String LINKSTO = "tosca.relationships.network.LinksTo"; + public static final String BINDSTO = "tosca.relationships.network.BindsTo"; + + public static final String RELATIONSHIP_TYPE[] = { + "tosca.relationships.DependsOn", + "tosca.relationships.HostedOn", + "tosca.relationships.ConnectsTo", + "tosca.relationships.AttachesTo", + "tosca.relationships.network.LinksTo", + "tosca.relationships.network.BindsTo"}; + + public static final String NODE_PREFIX = "tosca.nodes."; + public static final String RELATIONSHIP_PREFIX = "tosca.relationships."; + public static final String CAPABILITY_PREFIX = "tosca.capabilities."; + public static final String INTERFACE_PREFIX = "tosca.interfaces."; + public static final String ARTIFACT_PREFIX = "tosca.artifacts."; + public static final String POLICY_PREFIX = "tosca.policies."; + public static final String GROUP_PREFIX = "tosca.groups."; + //currently the data types are defined only for network + // but may have changes in the future. + public static final String DATATYPE_PREFIX = "tosca.datatypes."; + public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network."; + public static final String TOSCA = "tosca"; + + protected String type; + protected LinkedHashMap defs = null; + + public Object getParentType() { + return null; + } + + public String derivedFrom(LinkedHashMap defs) { // Return a type this type is derived from - return (String)entityValue(defs, "derived_from"); + return (String) entityValue(defs, "derived_from"); } public boolean isDerivedFrom(String type_str) { // Check if object inherits from the given type // Returns true if this object is derived from 'type_str' // False otherwise. 
- if(type == null || this.type.isEmpty()) { + if (type == null || this.type.isEmpty()) { return false; - } - else if(type == type_str) { + } else if (type == type_str) { return true; - } - else if(getParentType() != null) { - return ((EntityType)getParentType()).isDerivedFrom(type_str); - } - else { + } else if (getParentType() != null) { + return ((EntityType) getParentType()).isDerivedFrom(type_str); + } else { return false; } } - public Object entityValue(LinkedHashMap defs, String key) { - if(defs != null) { - return defs.get(key); - } - return null; + public Object entityValue(LinkedHashMap defs, String key) { + if (defs != null) { + return defs.get(key); + } + return null; } @SuppressWarnings("unchecked") - public Object getValue(String ndtype, LinkedHashMap _defs, boolean parent) { + public Object getValue(String ndtype, LinkedHashMap _defs, boolean parent) { Object value = null; - if(_defs == null) { - if(defs == null) { + if (_defs == null) { + if (defs == null) { return null; } _defs = this.defs; } - Object defndt = _defs.get(ndtype); - if(defndt != null) { + Object defndt = _defs.get(ndtype); + if (defndt != null) { // copy the value to avoid that next operations add items in the // item definitions //value = copy.copy(defs[ndtype]) - value = CopyUtils.copyLhmOrAl(defndt); + value = CopyUtils.copyLhmOrAl(defndt); } - - if(parent) { + + if (parent) { EntityType p = this; - if(p != null) { - while(p != null) { - if(p.defs != null && p.defs.get(ndtype) != null) { + if (p != null) { + while (p != null) { + if (p.defs != null && p.defs.get(ndtype) != null) { // get the parent value Object parentValue = p.defs.get(ndtype); - if(value != null) { - if(value instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)parentValue).entrySet()) { - String k = me.getKey(); - if(((LinkedHashMap)value).get(k) == null) { - ((LinkedHashMap)value).put(k,me.getValue()); - } - } + if (value != null) { + if (value instanceof LinkedHashMap) { + for (Map.Entry me : ((LinkedHashMap) parentValue).entrySet()) { + String k = me.getKey(); + if (((LinkedHashMap) value).get(k) == null) { + ((LinkedHashMap) value).put(k, me.getValue()); + } + } } - if(value instanceof ArrayList) { - for(Object pValue: (ArrayList)parentValue) { - if(!((ArrayList)value).contains(pValue)) { - ((ArrayList)value).add(pValue); - } - } + if (value instanceof ArrayList) { + for (Object pValue : (ArrayList) parentValue) { + if (!((ArrayList) value).contains(pValue)) { + ((ArrayList) value).add(pValue); + } + } } - } - else { - // value = copy.copy(parent_value) + } else { + // value = copy.copy(parent_value) value = CopyUtils.copyLhmOrAl(parentValue); } - } - p = (EntityType)p.getParentType(); + } + p = (EntityType) p.getParentType(); } } } - - return value; - } + + return value; + } @SuppressWarnings("unchecked") - public Object getDefinition(String ndtype) { + public Object getDefinition(String ndtype) { Object value = null; - LinkedHashMap _defs; + LinkedHashMap _defs; // no point in hasattr, because we have it, and it // doesn't do anything except emit an exception anyway //if not hasattr(self, 'defs'): @@ -228,59 +228,57 @@ public class EntityType { //else: // defs = self.defs _defs = this.defs; - - if(_defs != null && _defs.get(ndtype) != null) { - value = _defs.get(ndtype); + + if (_defs != null && _defs.get(ndtype) != null) { + value = _defs.get(ndtype); } Object p = getParentType(); - if(p != null) { - Object inherited = ((EntityType)p).getDefinition(ndtype); - if(inherited != null) { + if (p != null) { + Object inherited 
= ((EntityType) p).getDefinition(ndtype); + if (inherited != null) { // inherited = dict(inherited) WTF?!? - if(value == null) { - value = inherited; - } - else { - //????? + if (value == null) { + value = inherited; + } else { + //????? //inherited.update(value) //value.update(inherited) - for(Map.Entry me: ((LinkedHashMap)inherited).entrySet()) { - ((LinkedHashMap)value).put(me.getKey(),me.getValue()); - } - } - } + for (Map.Entry me : ((LinkedHashMap) inherited).entrySet()) { + ((LinkedHashMap) value).put(me.getKey(), me.getValue()); + } + } + } } return value; } - - public static void updateDefinitions(String version) { + + public static void updateDefinitions(String version) { ExtTools exttools = new ExtTools(); String extensionDefsFile = exttools.getDefsFile(version); - try (InputStream input = EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);){ - Yaml yaml = new Yaml(); - LinkedHashMap nfvDefFile = (LinkedHashMap)yaml.load(input); - LinkedHashMap nfvDef = new LinkedHashMap<>(); - for(String section: TOSCA_DEF_SECTIONS) { - if(nfvDefFile.get(section) != null) { - LinkedHashMap value = - (LinkedHashMap)nfvDefFile.get(section); - for(String key: value.keySet()) { - nfvDef.put(key, value.get(key)); - } - } - } - TOSCA_DEF.putAll(nfvDef); - } - catch (IOException e) { - log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}",extensionDefsFile); - log.error("Exception:", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280", - String.format("Failed to update definitions from defs file \"%s\" ",extensionDefsFile))); - return; - } + try (InputStream input = EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);) { + Yaml yaml = new Yaml(); + LinkedHashMap nfvDefFile = (LinkedHashMap) yaml.load(input); + LinkedHashMap nfvDef = new LinkedHashMap<>(); + for (String section : TOSCA_DEF_SECTIONS) { + if (nfvDefFile.get(section) != null) { + LinkedHashMap value = + (LinkedHashMap) nfvDefFile.get(section); + for (String key : value.keySet()) { + nfvDef.put(key, value.get(key)); + } + } + } + TOSCA_DEF.putAll(nfvDef); + } catch (IOException e) { + log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}", extensionDefsFile); + log.error("Exception:", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280", + String.format("Failed to update definitions from defs file \"%s\" ", extensionDefsFile))); + return; + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java index cbcb6f6..db6f2b7 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
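The static initializer in the EntityType hunk above flattens the per-section maps of TOSCA_definition_1_0.yaml (node_types, data_types, capability_types, and so on, listed in TOSCA_DEF_SECTIONS) into the single TOSCA_DEF lookup map used by the type classes. A minimal sketch of that flattening under the same assumptions as loadTdf() (SnakeYAML load of the definitions stream); the sketch class and method names are illustrative:

    import java.io.InputStream;
    import java.util.LinkedHashMap;
    import org.yaml.snakeyaml.Yaml;

    class ToscaDefSketch {
        @SuppressWarnings("unchecked")
        static LinkedHashMap<String, Object> flatten(InputStream input, String[] sections) {
            // parse the definitions file, as loadTdf() does
            LinkedHashMap<String, Object> loaded = (LinkedHashMap<String, Object>) new Yaml().load(input);
            LinkedHashMap<String, Object> flat = new LinkedHashMap<>();
            for (String section : sections) {                // e.g. "node_types", "data_types"
                Object value = loaded.get(section);
                if (value instanceof LinkedHashMap) {
                    // keys arrive fully qualified, e.g. "tosca.nodes.Compute"
                    flat.putAll((LinkedHashMap<String, Object>) value);
                }
            }
            return flat;
        }
    }

updateDefinitions() applies the same per-section merge to an extension definitions file and then folds the result into TOSCA_DEF with putAll.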
@@ -29,151 +29,150 @@ import java.util.Map; public class GroupType extends StatefulEntityType { - private static final String DERIVED_FROM = "derived_from"; - private static final String VERSION = "version"; - private static final String METADATA = "metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String MEMBERS = "members"; - private static final String INTERFACES = "interfaces"; - - private static final String SECTIONS[] = { - DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; - - private String groupType; - private LinkedHashMap customDef; - private String groupDescription; - private String groupVersion; - //private LinkedHashMap groupProperties; - //private ArrayList groupMembers; - private LinkedHashMap metaData; - - @SuppressWarnings("unchecked") - public GroupType(String _grouptype,LinkedHashMap _customDef) { - super(_grouptype,GROUP_PREFIX,_customDef); - - groupType = _grouptype; - customDef = _customDef; - _validateFields(); - if(defs != null) { - groupDescription = (String)defs.get(DESCRIPTION); - groupVersion = (String)defs.get(VERSION); - //groupProperties = (LinkedHashMap)defs.get(PROPERTIES); - //groupMembers = (ArrayList)defs.get(MEMBERS); - Object mdo = defs.get(METADATA); - if(mdo instanceof LinkedHashMap) { - metaData = (LinkedHashMap)mdo; - } - else { - metaData = null; - } - - if(metaData != null) { - _validateMetadata(metaData); - } - } - } - - public GroupType getParentType() { + private static final String DERIVED_FROM = "derived_from"; + private static final String VERSION = "version"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + + private static final String[] SECTIONS = { + DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String groupType; + private LinkedHashMap customDef; + private String groupDescription; + private String groupVersion; + //private LinkedHashMap groupProperties; + //private ArrayList groupMembers; + private LinkedHashMap metaData; + + @SuppressWarnings("unchecked") + public GroupType(String groupType, LinkedHashMap customDef) { + super(groupType, GROUP_PREFIX, customDef); + + this.groupType = groupType; + this.customDef = customDef; + validateFields(); + if (defs != null) { + groupDescription = (String) defs.get(DESCRIPTION); + groupVersion = (String) defs.get(VERSION); + //groupProperties = (LinkedHashMap)defs.get(PROPERTIES); + //groupMembers = (ArrayList)defs.get(MEMBERS); + Object mdo = defs.get(METADATA); + if (mdo instanceof LinkedHashMap) { + metaData = (LinkedHashMap) mdo; + } else { + metaData = null; + } + + if (metaData != null) { + validateMetadata(metaData); + } + } + } + + public GroupType getParentType() { // Return a group statefulentity of this entity is derived from. 
- if(defs == null) { + if (defs == null) { return null; } String pgroupEntity = derivedFrom(defs); - if(pgroupEntity != null) { - return new GroupType(pgroupEntity,customDef); + if (pgroupEntity != null) { + return new GroupType(pgroupEntity, customDef); } return null; - } - - public String getDescription() { - return groupDescription; - } - - public String getVersion() { - return groupVersion; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getInterfaces() { - Object ifo = getValue(INTERFACES,null,false); - if(ifo instanceof LinkedHashMap) { - return (LinkedHashMap)ifo; - } - return new LinkedHashMap(); - } - - private void _validateFields() { - if(defs != null) { - for(String name: defs.keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(name.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { + } + + public String getDescription() { + return groupDescription; + } + + public String getVersion() { + return groupVersion; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + Object ifo = getValue(INTERFACES, null, false); + if (ifo instanceof LinkedHashMap) { + return (LinkedHashMap) ifo; + } + return new LinkedHashMap(); + } + + private void validateFields() { + if (defs != null) { + for (String name : defs.keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (name.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE120", String.format( - "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", - groupType,name))); - } - } - } - } - - @SuppressWarnings("unchecked") - private void _validateMetadata(LinkedHashMap metadata) { - String mtt = (String) metadata.get("type"); - if(mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { + "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", + groupType, name))); + } + } + } + } + + @SuppressWarnings("unchecked") + private void validateMetadata(LinkedHashMap metadata) { + String mtt = (String) metadata.get("type"); + if (mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE121", String.format( - "InvalidTypeError: \"%s\" defined in group for metadata is invalid", - mtt))); - } - for(String entrySchema: metadata.keySet()) { - Object estob = metadata.get(entrySchema); - if(estob instanceof LinkedHashMap) { - String est = (String)((LinkedHashMap)estob).get("type"); - if(!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format( - "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid", - est,entrySchema))); - } - } - } - } - - public String getType() { - return groupType; - } - + "InvalidTypeError: \"%s\" defined in group for metadata is invalid", + mtt))); + } + for (String entrySchema : metadata.keySet()) { + Object estob = metadata.get(entrySchema); + if (estob instanceof LinkedHashMap) { + String est = (String) ((LinkedHashMap) estob).get("type"); + if (!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format( + "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid", + est, entrySchema))); + } + } + } + } + + public String getType() { + return groupType; + } + @SuppressWarnings("unchecked") - public ArrayList getCapabilitiesObjects() 
{ + public ArrayList getCapabilitiesObjects() { // Return a list of capability objects - ArrayList typecapabilities = new ArrayList<>(); - LinkedHashMap caps = (LinkedHashMap)getValue(CAPABILITIES, null, true); - if(caps != null) { + ArrayList typecapabilities = new ArrayList<>(); + LinkedHashMap caps = (LinkedHashMap) getValue(CAPABILITIES, null, true); + if (caps != null) { // 'cname' is symbolic name of the capability // 'cvalue' is a dict { 'type': } - for(Map.Entry me: caps.entrySet()) { - String cname = me.getKey(); - LinkedHashMap cvalue = (LinkedHashMap)me.getValue(); - String ctype = cvalue.get("type"); - CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef); - typecapabilities.add(cap); - } + for (Map.Entry me : caps.entrySet()) { + String cname = me.getKey(); + LinkedHashMap cvalue = (LinkedHashMap) me.getValue(); + String ctype = cvalue.get("type"); + CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef); + typecapabilities.add(cap); + } } return typecapabilities; - } - - public LinkedHashMap getCapabilities() { + } + + public LinkedHashMap getCapabilities() { // Return a dictionary of capability name-objects pairs - LinkedHashMap caps = new LinkedHashMap<>(); - for(CapabilityTypeDef ctd: getCapabilitiesObjects()) { - caps.put(ctd.getName(),ctd); - } - return caps; - } + LinkedHashMap caps = new LinkedHashMap<>(); + for (CapabilityTypeDef ctd : getCapabilitiesObjects()) { + caps.put(ctd.getName(), ctd); + } + return caps; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java index ceb8fb9..2862a11 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
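The validateMetadata check in the GroupType hunk above only accepts a metadata definition whose declared type is "map" or "tosca:map" (otherwise JE121) and whose nested entry definitions are typed "string" (otherwise JE122). A sketch of a definition map that satisfies both checks; the entry_schema key name and sketch class name are illustrative:

    import java.util.LinkedHashMap;

    class GroupMetadataSketch {
        static LinkedHashMap<String, Object> validMetadataDef() {
            LinkedHashMap<String, Object> metadata = new LinkedHashMap<>();
            metadata.put("type", "tosca:map");             // "map" would be accepted as well
            LinkedHashMap<String, Object> entrySchema = new LinkedHashMap<>();
            entrySchema.put("type", "string");             // anything but "string" is reported as JE122
            metadata.put("entry_schema", entrySchema);     // key name is illustrative
            return metadata;
        }
    }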
@@ -30,167 +30,163 @@ import java.util.Map; public class InterfacesDef extends StatefulEntityType { - public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; - public static final String CONFIGURE = "tosca.interfaces.relationship.Configure"; - public static final String LIFECYCLE_SHORTNAME = "Standard"; - public static final String CONFIGURE_SHORTNAME = "Configure"; - - public static final String SECTIONS[] = { - LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,CONFIGURE_SHORTNAME - }; - - public static final String IMPLEMENTATION = "implementation"; - public static final String DESCRIPTION = "description"; - public static final String INPUTS = "inputs"; - - public static final String INTERFACE_DEF_RESERVED_WORDS[] = { - "type", "inputs", "derived_from", "version", "description"}; - - private EntityType ntype; - private EntityTemplate nodeTemplate; - - private String operationName; - private Object operationDef; - private Object implementation; - private LinkedHashMap inputs; - private String description; - - @SuppressWarnings("unchecked") - public InterfacesDef(EntityType inodeType, - String interfaceType, - EntityTemplate inodeTemplate, - String iname, - Object ivalue) { - // void - super(); - - ntype = inodeType; - nodeTemplate = inodeTemplate; - type = interfaceType; - operationName = iname; - operationDef = ivalue; - implementation = null; - inputs = null; - defs = new LinkedHashMap(); - - if(interfaceType.equals(LIFECYCLE_SHORTNAME)) { - interfaceType = LIFECYCLE; - } - if(interfaceType.equals(CONFIGURE_SHORTNAME)) { - interfaceType = CONFIGURE; - } - - // only NodeType has getInterfaces "hasattr(ntype,interfaces)" - // while RelationshipType does not - if(ntype instanceof NodeType) { - if(((NodeType)ntype).getInterfaces() != null && - ((NodeType)ntype).getInterfaces().values().contains(interfaceType)) { - LinkedHashMap nii = (LinkedHashMap) - ((NodeType)ntype).getInterfaces().get(interfaceType); - interfaceType = (String)nii.get("type"); - } - } - if(inodeType != null) { - if(nodeTemplate != null && nodeTemplate.getCustomDef() != null && - nodeTemplate.getCustomDef().containsKey(interfaceType)) { - defs = (LinkedHashMap) - nodeTemplate.getCustomDef().get(interfaceType); - } - else { - defs = (LinkedHashMap)TOSCA_DEF.get(interfaceType); - } - } - - if(ivalue != null) { - if(ivalue instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)ivalue).entrySet()) { - if(me.getKey().equals(IMPLEMENTATION)) { - implementation = me.getValue(); - } - else if(me.getKey().equals(INPUTS)) { - inputs = (LinkedHashMap)me.getValue(); - } - else if(me.getKey().equals(DESCRIPTION)) { - description = (String)me.getValue(); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( - "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", - nodeTemplate.getName(),me.getKey()))); - } - } - } - } - } - - public ArrayList getLifecycleOps() { - if(defs != null) { - if(type.equals(LIFECYCLE)) { - return _ops(); - } - } - return null; - } - - public ArrayList getInterfaceOps() { - if(defs != null) { - ArrayList ops = _ops(); - ArrayList idrw = new ArrayList<>(); - for(int i=0; i getConfigureOps() { - if(defs != null) { - if(type.equals(CONFIGURE)) { - return _ops(); - } - } - return null; - } - - private ArrayList _ops() { - return new ArrayList(defs.keySet()); - } - - // getters/setters - - public LinkedHashMap getInputs() { - return inputs; - } - - public void setInput(String name,Object 
value) { - inputs.put(name, value); - } - - public Object getImplementation(){ - return implementation; - } - - public void setImplementation(Object implementation){ - this.implementation = implementation; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public String getOperationName() { - return operationName; - } - - public void setOperationName(String operationName) { - this.operationName = operationName; - } + public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; + public static final String CONFIGURE = "tosca.interfaces.relationship.Configure"; + public static final String LIFECYCLE_SHORTNAME = "Standard"; + public static final String CONFIGURE_SHORTNAME = "Configure"; + + public static final String[] SECTIONS = { + LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, CONFIGURE_SHORTNAME + }; + + public static final String IMPLEMENTATION = "implementation"; + public static final String DESCRIPTION = "description"; + public static final String INPUTS = "inputs"; + + public static final String[] INTERFACE_DEF_RESERVED_WORDS = { + "type", "inputs", "derived_from", "version", "description"}; + + private EntityType ntype; + private EntityTemplate nodeTemplate; + + private String operationName; + private Object operationDef; + private Object implementation; + private LinkedHashMap inputs; + private String description; + + @SuppressWarnings("unchecked") + public InterfacesDef(EntityType inodeType, + String interfaceType, + EntityTemplate inodeTemplate, + String iname, + Object ivalue) { + // void + super(); + + ntype = inodeType; + nodeTemplate = inodeTemplate; + type = interfaceType; + operationName = iname; + operationDef = ivalue; + implementation = null; + inputs = null; + defs = new LinkedHashMap<>(); + + if (interfaceType.equals(LIFECYCLE_SHORTNAME)) { + interfaceType = LIFECYCLE; + } + if (interfaceType.equals(CONFIGURE_SHORTNAME)) { + interfaceType = CONFIGURE; + } + + // only NodeType has getInterfaces "hasattr(ntype,interfaces)" + // while RelationshipType does not + if (ntype instanceof NodeType) { + if (((NodeType) ntype).getInterfaces() != null + && ((NodeType) ntype).getInterfaces().values().contains(interfaceType)) { + LinkedHashMap nii = (LinkedHashMap) + ((NodeType) ntype).getInterfaces().get(interfaceType); + interfaceType = (String) nii.get("type"); + } + } + if (inodeType != null) { + if (nodeTemplate != null && nodeTemplate.getCustomDef() != null + && nodeTemplate.getCustomDef().containsKey(interfaceType)) { + defs = (LinkedHashMap) + nodeTemplate.getCustomDef().get(interfaceType); + } else { + defs = (LinkedHashMap) TOSCA_DEF.get(interfaceType); + } + } + + if (ivalue != null) { + if (ivalue instanceof LinkedHashMap) { + for (Map.Entry me : ((LinkedHashMap) ivalue).entrySet()) { + if (me.getKey().equals(IMPLEMENTATION)) { + implementation = me.getValue(); + } else if (me.getKey().equals(INPUTS)) { + inputs = (LinkedHashMap) me.getValue(); + } else if (me.getKey().equals(DESCRIPTION)) { + description = (String) me.getValue(); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( + "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", + nodeTemplate.getName(), me.getKey()))); + } + } + } + } + } + + public ArrayList getLifecycleOps() { + if (defs != null) { + if (type.equals(LIFECYCLE)) { + return ops(); + } + } + return null; + } + + public 
ArrayList getInterfaceOps() { + if (defs != null) { + ArrayList ops = ops(); + ArrayList idrw = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) { + idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]); + } + ops.removeAll(idrw); + return ops; + } + return null; + } + + public ArrayList getConfigureOps() { + if (defs != null) { + if (type.equals(CONFIGURE)) { + return ops(); + } + } + return null; + } + + private ArrayList ops() { + return new ArrayList(defs.keySet()); + } + + // getters/setters + + public LinkedHashMap getInputs() { + return inputs; + } + + public void setInput(String name, Object value) { + inputs.put(name, value); + } + + public Object getImplementation() { + return implementation; + } + + public void setImplementation(Object implementation) { + this.implementation = implementation; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getOperationName() { + return operationName; + } + + public void setOperationName(String operationName) { + this.operationName = operationName; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java index dd914d4..f3de49e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,36 +26,37 @@ import java.util.Map; import java.util.stream.Collectors; public class Metadata { - - private final Map metadataMap; - public Metadata(Map metadataMap) { + private final Map metadataMap; + + public Metadata(Map metadataMap) { this.metadataMap = metadataMap != null ? metadataMap : new HashMap<>(); } - public String getValue(String key) { - - Object obj = this.metadataMap.get(key); - if (obj != null){ - return String.valueOf(obj); - } - return null; - } - - /** - * Get all properties of a Metadata object.
- * This object represents the "metadata" section of some entity. - * @return all properties of this Metadata, as a key-value. - */ - public Map getAllProperties() { - return metadataMap.entrySet().stream().map(e-> new AbstractMap.SimpleEntry(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey,Map.Entry::getValue)); - } - - @Override - public String toString() { - return "Metadata{" + - "metadataMap=" + metadataMap + - '}'; - } + public String getValue(String key) { + + Object obj = this.metadataMap.get(key); + if (obj != null) { + return String.valueOf(obj); + } + return null; + } + + /** + * Get all properties of a Metadata object.
+ * This object represents the "metadata" section of some entity. + * + * @return all properties of this Metadata, as a key-value. + */ + public Map getAllProperties() { + return metadataMap.entrySet().stream().map(e -> new AbstractMap.SimpleEntry(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public String toString() { + return "Metadata{" + + "metadataMap=" + metadataMap + + '}'; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java index 918c629..c251be9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,305 +28,300 @@ import java.util.LinkedHashMap; import java.util.Map; public class NodeType extends StatefulEntityType { - // TOSCA built-in node type - - private static final String DERIVED_FROM = "derived_from"; - private static final String METADATA = "metadata"; - private static final String PROPERTIES = "properties"; - private static final String VERSION = "version"; - private static final String DESCRIPTION = "description"; - private static final String ATTRIBUTES = "attributes"; - private static final String REQUIREMENTS = "requirements"; - private static final String CAPABILITIES = "capabilities"; - private static final String INTERFACES = "interfaces"; - private static final String ARTIFACTS = "artifacts"; - - private static final String SECTIONS[] = { - DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS - }; - - private String ntype; - public LinkedHashMap customDef; - - public NodeType(String nttype,LinkedHashMap ntcustomDef) { - super(nttype,NODE_PREFIX, ntcustomDef); + // TOSCA built-in node type + + private static final String DERIVED_FROM = "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String ATTRIBUTES = "attributes"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + private static final String INTERFACES = "interfaces"; + private static final String ARTIFACTS = "artifacts"; + + private static final String SECTIONS[] = { + DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS + }; + + private String ntype; + public LinkedHashMap customDef; + + public NodeType(String nttype, LinkedHashMap ntcustomDef) { + super(nttype, NODE_PREFIX, ntcustomDef); ntype = nttype; customDef = ntcustomDef; _validateKeys(); - } + } - public Object getParentType() { + public Object getParentType() { // Return a node this node is derived from - if(defs == null) { - return null; - } - String pnode = derivedFrom(defs); - if(pnode != null && 
!pnode.isEmpty()) { - return new NodeType(pnode,customDef); - } - return null; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getRelationship() { + if (defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if (pnode != null && !pnode.isEmpty()) { + return new NodeType(pnode, customDef); + } + return null; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationship() { // Return a dictionary of relationships to other node types // This method returns a dictionary of named relationships that nodes // of the current node type (self) can have to other nodes (of specific // types) in a TOSCA template. - LinkedHashMap relationship = new LinkedHashMap<>(); - ArrayList> requires; - Object treq = getAllRequirements(); - if(treq != null) { + LinkedHashMap relationship = new LinkedHashMap<>(); + ArrayList> requires; + Object treq = getAllRequirements(); + if (treq != null) { // NOTE(sdmonov): Check if requires is a dict. // If it is a dict convert it to a list of dicts. // This is needed because currently the code below supports only // lists as requirements definition. The following check will // make sure if a map (dict) was provided it will be converted to // a list before proceeding to the parsing. - if(treq instanceof LinkedHashMap) { - requires = new ArrayList<>(); - for(Map.Entry me: ((LinkedHashMap)treq).entrySet()) { - LinkedHashMap tl = new LinkedHashMap<>(); - tl.put(me.getKey(),me.getValue()); - requires.add(tl); - } - } - else { - requires = (ArrayList>)treq; + if (treq instanceof LinkedHashMap) { + requires = new ArrayList<>(); + for (Map.Entry me : ((LinkedHashMap) treq).entrySet()) { + LinkedHashMap tl = new LinkedHashMap<>(); + tl.put(me.getKey(), me.getValue()); + requires.add(tl); + } + } else { + requires = (ArrayList>) treq; } - + String keyword = null; String nodeType = null; - for(LinkedHashMap require: requires) { - String relation = null; - for(Map.Entry re: require.entrySet()) { - String key = re.getKey(); - LinkedHashMap req = (LinkedHashMap)re.getValue(); - if(req.get("relationship") != null) { - Object trelation = req.get("relationship"); - // trelation is a string or a dict with "type" mapped to the string we want - if(trelation instanceof String) { - relation = (String)trelation; - } - else { - if(((LinkedHashMap)trelation).get("type") != null) { - relation = (String)((LinkedHashMap)trelation).get("type"); - } - } - nodeType = (String)req.get("node"); - //BUG meaningless?? LinkedHashMap value = req; - if(nodeType != null) { - keyword = "node"; - } - else { - String getRelation = null; + for (LinkedHashMap require : requires) { + String relation = null; + for (Map.Entry re : require.entrySet()) { + String key = re.getKey(); + LinkedHashMap req = (LinkedHashMap) re.getValue(); + if (req.get("relationship") != null) { + Object trelation = req.get("relationship"); + // trelation is a string or a dict with "type" mapped to the string we want + if (trelation instanceof String) { + relation = (String) trelation; + } else { + if (((LinkedHashMap) trelation).get("type") != null) { + relation = (String) ((LinkedHashMap) trelation).get("type"); + } + } + nodeType = (String) req.get("node"); + //BUG meaningless?? 
LinkedHashMap value = req; + if (nodeType != null) { + keyword = "node"; + } else { + String getRelation = null; // If nodeTypeByCap is a dict and has a type key // we need to lookup the node type using // the capability type - String captype = (String)req.get("capability"); - nodeType = _getNodeTypeByCap(captype); - if (nodeType != null){ - getRelation = _getRelation(key, nodeType); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( - "NodeTypeRequirementForCapabilityUnfulfilled: Node type: \"%s\" with requrement \"%s\" for node type with capability type \"%s\" is not found\\unfulfilled", this.ntype, key, captype))); - } - if (getRelation != null) { - relation = getRelation; - } - keyword = key; - } - } - } - if(relation == null || nodeType == null){ - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( - "NodeTypeForRelationUnfulfilled: Node type \"%s\" - relationship type \"%s\" is unfulfilled", this.ntype, relation))); - } else { - RelationshipType rtype = new RelationshipType(relation, keyword, customDef); - NodeType relatednode = new NodeType(nodeType, customDef); - relationship.put(rtype, relatednode); - } + String captype = (String) req.get("capability"); + nodeType = _getNodeTypeByCap(captype); + if (nodeType != null) { + getRelation = _getRelation(key, nodeType); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( + "NodeTypeRequirementForCapabilityUnfulfilled: Node type: \"%s\" with requrement \"%s\" for node type with capability type \"%s\" is not found\\unfulfilled", this.ntype, key, captype))); + } + if (getRelation != null) { + relation = getRelation; + } + keyword = key; + } + } + } + if (relation == null || nodeType == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( + "NodeTypeForRelationUnfulfilled: Node type \"%s\" - relationship type \"%s\" is unfulfilled", this.ntype, relation))); + } else { + RelationshipType rtype = new RelationshipType(relation, keyword, customDef); + NodeType relatednode = new NodeType(nodeType, customDef); + relationship.put(rtype, relatednode); + } } - } - return relationship; - - } - + } + return relationship; + + } + @SuppressWarnings("unchecked") - private String _getNodeTypeByCap(String cap) { + private String _getNodeTypeByCap(String cap) { // Find the node type that has the provided capability // This method will lookup all node types if they have the // provided capability. 
// Filter the node types ArrayList nodeTypes = new ArrayList<>(); - for(String nt: customDef.keySet()) { - if(nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) { - nodeTypes.add(nt); - } + for (String nt : customDef.keySet()) { + if (nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) { + nodeTypes.add(nt); + } } - for(String nt: nodeTypes) { - LinkedHashMap nodeDef = (LinkedHashMap)customDef.get(nt); - if(nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { - LinkedHashMap nodeCaps = (LinkedHashMap)nodeDef.get("capabilities"); - if(nodeCaps != null) { - for(Object val: nodeCaps.values()) { - if(val instanceof LinkedHashMap) { - String tp = (String)((LinkedHashMap)val).get("type"); - if(tp != null && tp.equals(cap)) { - return nt; - } - } - } - } + for (String nt : nodeTypes) { + LinkedHashMap nodeDef = (LinkedHashMap) customDef.get(nt); + if (nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { + LinkedHashMap nodeCaps = (LinkedHashMap) nodeDef.get("capabilities"); + if (nodeCaps != null) { + for (Object val : nodeCaps.values()) { + if (val instanceof LinkedHashMap) { + String tp = (String) ((LinkedHashMap) val).get("type"); + if (tp != null && tp.equals(cap)) { + return nt; + } + } + } + } } - } + } return null; - } - + } + @SuppressWarnings("unchecked") - private String _getRelation(String key,String ndtype) { - String relation = null; - NodeType ntype = new NodeType(ndtype, customDef); - LinkedHashMap caps = ntype.getCapabilities(); - if(caps != null && caps.get(key) != null) { - CapabilityTypeDef c = caps.get(key); - for(int i=0; i< RELATIONSHIP_TYPE.length; i++) { - String r = RELATIONSHIP_TYPE[i]; - if(r != null) { - relation = r; - break; - } - LinkedHashMap rtypedef = (LinkedHashMap)customDef.get(r); - for(Object o: rtypedef.values()) { - LinkedHashMap properties = (LinkedHashMap)o; - if(properties.get(c.getType()) != null) { - relation = r; - break; - } - } - if(relation != null) { - break; - } - else { - for(Object o: rtypedef.values()) { - LinkedHashMap properties = (LinkedHashMap)o; - if(properties.get(c.getParentType()) != null) { - relation = r; - break; - } - } - } - } - } - return relation; + private String _getRelation(String key, String ndtype) { + String relation = null; + NodeType ntype = new NodeType(ndtype, customDef); + LinkedHashMap caps = ntype.getCapabilities(); + if (caps != null && caps.get(key) != null) { + CapabilityTypeDef c = caps.get(key); + for (int i = 0; i < RELATIONSHIP_TYPE.length; i++) { + String r = RELATIONSHIP_TYPE[i]; + if (r != null) { + relation = r; + break; + } + LinkedHashMap rtypedef = (LinkedHashMap) customDef.get(r); + for (Object o : rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap) o; + if (properties.get(c.getType()) != null) { + relation = r; + break; + } + } + if (relation != null) { + break; + } else { + for (Object o : rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap) o; + if (properties.get(c.getParentType()) != null) { + relation = r; + break; + } + } + } + } + } + return relation; } @SuppressWarnings("unchecked") - public ArrayList getCapabilitiesObjects() { + public ArrayList getCapabilitiesObjects() { // Return a list of capability objects - ArrayList typecapabilities = new ArrayList<>(); - LinkedHashMap caps = (LinkedHashMap)getValue(CAPABILITIES, null, true); - if(caps != null) { + ArrayList typecapabilities = new ArrayList<>(); + LinkedHashMap caps = 
(LinkedHashMap) getValue(CAPABILITIES, null, true); + if (caps != null) { // 'cname' is symbolic name of the capability // 'cvalue' is a dict { 'type': } - for(Map.Entry me: caps.entrySet()) { - String cname = me.getKey(); - LinkedHashMap cvalue = (LinkedHashMap)me.getValue(); - String ctype = cvalue.get("type"); - CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef); - typecapabilities.add(cap); - } + for (Map.Entry me : caps.entrySet()) { + String cname = me.getKey(); + LinkedHashMap cvalue = (LinkedHashMap) me.getValue(); + String ctype = cvalue.get("type"); + CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef); + typecapabilities.add(cap); + } } return typecapabilities; - } - - public LinkedHashMap getCapabilities() { + } + + public LinkedHashMap getCapabilities() { // Return a dictionary of capability name-objects pairs - LinkedHashMap caps = new LinkedHashMap<>(); - for(CapabilityTypeDef ctd: getCapabilitiesObjects()) { - caps.put(ctd.getName(),ctd); - } - return caps; - } - - @SuppressWarnings("unchecked") - public ArrayList getRequirements() { - return (ArrayList)getValue(REQUIREMENTS,null,true); - } - - public ArrayList getAllRequirements() { - return getRequirements(); - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getInterfaces() { - return (LinkedHashMap)getValue(INTERFACES,null,false); - } - - - @SuppressWarnings("unchecked") - public ArrayList getLifecycleInputs() - { + LinkedHashMap caps = new LinkedHashMap<>(); + for (CapabilityTypeDef ctd : getCapabilitiesObjects()) { + caps.put(ctd.getName(), ctd); + } + return caps; + } + + @SuppressWarnings("unchecked") + public ArrayList getRequirements() { + return (ArrayList) getValue(REQUIREMENTS, null, true); + } + + public ArrayList getAllRequirements() { + return getRequirements(); + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + return (LinkedHashMap) getValue(INTERFACES, null, false); + } + + + @SuppressWarnings("unchecked") + public ArrayList getLifecycleInputs() { // Return inputs to life cycle operations if found ArrayList inputs = new ArrayList<>(); - LinkedHashMap interfaces = getInterfaces(); - if(interfaces != null) { - for(Map.Entry me: interfaces.entrySet()) { - String iname = me.getKey(); - LinkedHashMap ivalue = (LinkedHashMap)me.getValue(); - if(iname.equals(InterfacesDef.LIFECYCLE)) { - for(Map.Entry ie: ivalue.entrySet()) { - if(ie.getKey().equals("input")) { - LinkedHashMap y = (LinkedHashMap)ie.getValue(); - for(String i: y.keySet()) { - inputs.add(i); - } - } - } - } - } + LinkedHashMap interfaces = getInterfaces(); + if (interfaces != null) { + for (Map.Entry me : interfaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap ivalue = (LinkedHashMap) me.getValue(); + if (iname.equals(InterfacesDef.LIFECYCLE)) { + for (Map.Entry ie : ivalue.entrySet()) { + if (ie.getKey().equals("input")) { + LinkedHashMap y = (LinkedHashMap) ie.getValue(); + for (String i : y.keySet()) { + inputs.add(i); + } + } + } + } + } } return inputs; - } - - public ArrayList getLifecycleOperations() { - // Return available life cycle operations if found - ArrayList ops = null; - LinkedHashMap interfaces = getInterfaces(); - if(interfaces != null) { - InterfacesDef i = new InterfacesDef(this,InterfacesDef.LIFECYCLE,null,null,null); - ops = i.getLifecycleOps(); - } - return ops; - } - - public CapabilityTypeDef getCapability(String name) { - //BUG?? the python code has to be wrong - // it refers to a bad attribute 'value'... 
- LinkedHashMap caps = getCapabilities(); - if(caps != null) { - return caps.get(name); - } - return null; + } + + public ArrayList getLifecycleOperations() { + // Return available life cycle operations if found + ArrayList ops = null; + LinkedHashMap interfaces = getInterfaces(); + if (interfaces != null) { + InterfacesDef i = new InterfacesDef(this, InterfacesDef.LIFECYCLE, null, null, null); + ops = i.getLifecycleOps(); + } + return ops; + } + + public CapabilityTypeDef getCapability(String name) { + //BUG?? the python code has to be wrong + // it refers to a bad attribute 'value'... + LinkedHashMap caps = getCapabilities(); + if (caps != null) { + return caps.get(name); + } + return null; /* def get_capability(self, name): caps = self.get_capabilities() if caps and name in caps.keys(): return caps[name].value */ - } + } public String getCapabilityType(String name) { - //BUG?? the python code has to be wrong - // it refers to a bad attribute 'value'... - CapabilityTypeDef captype = getCapability(name); - if(captype != null) { - return captype.getType(); - } - return null; + //BUG?? the python code has to be wrong + // it refers to a bad attribute 'value'... + CapabilityTypeDef captype = getCapability(name); + if (captype != null) { + return captype.getType(); + } + return null; /* def get_capability_type(self, name): captype = self.get_capability(name) @@ -336,21 +331,21 @@ public class NodeType extends StatefulEntityType { } private void _validateKeys() { - if(defs != null) { - for(String key: defs.keySet()) { - boolean bFound = false; - for(int i=0; i< SECTIONS.length; i++) { - if(key.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if(!bFound) { + if (defs != null) { + for (String key : defs.keySet()) { + boolean bFound = false; + for (int i = 0; i < SECTIONS.length; i++) { + if (key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE124", String.format( - "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"",ntype,key))); - } - } - } + "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"", ntype, key))); + } + } + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java index e4d1dd6..b227a31 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
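The unknown-field scan in NodeType's _validateKeys above is the same pattern that PolicyType and RelationshipType repeat further down: loop over a SECTIONS array and flag any key that is not found. A minimal stand-alone sketch of that check, using a hypothetical helper class and a java.util.Set purely for illustration (not code from this patch):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hypothetical sketch of the recurring "is this key a known section?" check.
final class SectionCheckSketch {

    // Section names taken from the PolicyType SECTIONS array shown below.
    private static final Set<String> SECTIONS = new HashSet<>(Arrays.asList(
            "derived_from", "metadata", "properties", "version",
            "description", "targets", "triggers", "type"));

    static boolean isKnownSection(String key) {
        return SECTIONS.contains(key);
    }

    public static void main(String[] args) {
        System.out.println(isKnownSection("properties"));  // true
        System.out.println(isKnownSection("propertiess")); // false: the case that produces
                                                           // the JE124/JE125 UnknownFieldError issues
    }
}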
@@ -21,180 +21,178 @@ package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class PolicyType extends StatefulEntityType { - - private static final String DERIVED_FROM = "derived_from"; - private static final String METADATA = "metadata"; - private static final String PROPERTIES = "properties"; - private static final String VERSION = "version"; - private static final String DESCRIPTION = "description"; - private static final String TARGETS = "targets"; - private static final String TRIGGERS = "triggers"; - private static final String TYPE = "type"; - - private static final String SECTIONS[] = { - DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE - }; - - private LinkedHashMap customDef; - private String policyDescription; - private Object policyVersion; - private LinkedHashMap properties; - private LinkedHashMap parentPolicies; - private LinkedHashMap metaData; - private ArrayList targetsList; - - - public PolicyType(String _type, LinkedHashMap _customDef) { - super(_type,POLICY_PREFIX,_customDef); - - type = _type; - customDef = _customDef; - _validateKeys(); - + + private static final String DERIVED_FROM = "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String TARGETS = "targets"; + private static final String TRIGGERS = "triggers"; + private static final String TYPE = "type"; + + private static final String[] SECTIONS = { + DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE + }; + + private LinkedHashMap customDef; + private String policyDescription; + private Object policyVersion; + private LinkedHashMap properties; + private LinkedHashMap parentPolicies; + private LinkedHashMap metaData; + private ArrayList targetsList; + + + public PolicyType(String type, LinkedHashMap customDef) { + super(type, POLICY_PREFIX, customDef); + + this.type = type; + this.customDef = customDef; + validateKeys(); + metaData = null; - if(defs != null && defs.get(METADATA) != null) { - metaData = (LinkedHashMap)defs.get(METADATA); - _validateMetadata(metaData); + if (defs != null && defs.get(METADATA) != null) { + metaData = (LinkedHashMap) defs.get(METADATA); + validateMetadata(metaData); } properties = null; - if(defs != null && defs.get(PROPERTIES) != null) { - properties = (LinkedHashMap)defs.get(PROPERTIES); + if (defs != null && defs.get(PROPERTIES) != null) { + properties = (LinkedHashMap) defs.get(PROPERTIES); } - parentPolicies = _getParentPolicies(); + parentPolicies = getParentPolicies(); policyVersion = null; - if(defs != null && defs.get(VERSION) != null) { + if (defs != null && defs.get(VERSION) != null) { policyVersion = (new TOSCAVersionProperty( - defs.get(VERSION))).getVersion(); + defs.get(VERSION).toString())).getVersion(); } policyDescription = null; - if(defs != null && defs.get(DESCRIPTION) != null) { - policyDescription = (String)defs.get(DESCRIPTION); + if (defs != null && defs.get(DESCRIPTION) != null) { + policyDescription = (String) 
defs.get(DESCRIPTION); } - + targetsList = null; - if(defs != null && defs.get(TARGETS) != null) { - targetsList = (ArrayList)defs.get(TARGETS); - _validateTargets(targetsList,customDef); + if (defs != null && defs.get(TARGETS) != null) { + targetsList = (ArrayList) defs.get(TARGETS); + validateTargets(targetsList, this.customDef); + } + + } + + private LinkedHashMap getParentPolicies() { + LinkedHashMap policies = new LinkedHashMap<>(); + String parentPolicy; + if (getParentType() != null) { + parentPolicy = getParentType().getType(); + } else { + parentPolicy = null; } - - } - - private LinkedHashMap _getParentPolicies() { - LinkedHashMap policies = new LinkedHashMap<>(); - String parentPolicy; - if(getParentType() != null) { - parentPolicy = getParentType().getType(); - } - else { - parentPolicy = null; - } - if(parentPolicy != null) { - while(parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) { - policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy)); - parentPolicy = (String) - ((LinkedHashMap)policies.get(parentPolicy)).get("derived_from);"); - } - } - return policies; - } - - public String getType() { - return type; - } - - public PolicyType getParentType() { + if (parentPolicy != null) { + while (parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) { + policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy)); + parentPolicy = (String) + ((LinkedHashMap) policies.get(parentPolicy)).get("derived_from);"); + } + } + return policies; + } + + public String getType() { + return type; + } + + public PolicyType getParentType() { // Return a policy statefulentity of this node is derived from - if(defs == null) { - return null; - } - String ppolicyEntity = derivedFrom(defs); - if(ppolicyEntity != null) { - return new PolicyType(ppolicyEntity,customDef); + if (defs == null) { + return null; + } + String policyEntity = derivedFrom(defs); + if (policyEntity != null) { + return new PolicyType(policyEntity, customDef); } return null; - } - - public Object getPolicy(String name) { + } + + public Object getPolicy(String name) { // Return the definition of a policy field by name - if(defs != null && defs.get(name) != null) { + if (defs != null && defs.get(name) != null) { return defs.get(name); } return null; - } + } - public ArrayList getTargets() { + public ArrayList getTargets() { // Return targets return targetsList; - } - - public String getDescription() { - return policyDescription; - } - - public Object getVersion() { - return policyVersion; - } - - private void _validateKeys() { - for(String key: defs.keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(key.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { + } + + public String getDescription() { + return policyDescription; + } + + public Object getVersion() { + return policyVersion; + } + + private void validateKeys() { + for (String key : defs.keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (key.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE125", String.format( - "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", - type,key))); - } - } - } - - private void _validateTargets(ArrayList _targetsList, - LinkedHashMap _customDef) { - for(String nodetype: _targetsList) { - if(_customDef.get(nodetype) == null) { + "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", + type, key))); + } + } + } + + private void 
validateTargets(ArrayList targetsList, + LinkedHashMap customDef) { + for (String nodetype : targetsList) { + if (customDef.get(nodetype) == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE126", String.format( - "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", - nodetype,type))); - - } - } - } - - private void _validateMetadata(LinkedHashMap _metaData) { - String mtype = (String)_metaData.get("type"); - if(mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { + "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", + nodetype, type))); + + } + } + } + + private void validateMetadata(LinkedHashMap metaData) { + String mtype = (String) metaData.get("type"); + if (mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE127", String.format( - "InvalidTypeError: \"%s\" defined in policy for metadata", - mtype))); - } - for(String entrySchema: metaData.keySet()) { - Object estob = metaData.get(entrySchema); - if(estob instanceof LinkedHashMap) { - String est = (String) - ((LinkedHashMap)estob).get("type"); - if(!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format( - "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"", - est,entrySchema))); - } - } - } - } + "InvalidTypeError: \"%s\" defined in policy for metadata", + mtype))); + } + for (String entrySchema : this.metaData.keySet()) { + Object estob = this.metaData.get(entrySchema); + if (estob instanceof LinkedHashMap) { + String est = (String) + ((LinkedHashMap) estob).get("type"); + if (!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format( + "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"", + est, entrySchema))); + } + } + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java index 65304dd..01fb9fc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
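For orientation, a small sketch of the kind of metadata map that passes the validateMetadata check in PolicyType above: the top-level "type" must be "map" or "tosca:map", and every nested schema map must declare type "string". The entry_schema key name here is illustrative only, not taken from the patch:

import java.util.LinkedHashMap;

// Sketch of a policy metadata definition accepted by PolicyType.validateMetadata.
public class PolicyMetadataSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> entrySchema = new LinkedHashMap<>();
        entrySchema.put("type", "string");         // nested maps must use type "string" (JE128 otherwise)

        LinkedHashMap<String, Object> metadata = new LinkedHashMap<>();
        metadata.put("type", "map");               // "map" or "tosca:map" (JE127 otherwise)
        metadata.put("entry_schema", entrySchema); // illustrative key name

        System.out.println(metadata);
    }
}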
@@ -38,16 +38,16 @@ public class PortSpec { private static final String SOURCE_RANGE = "source_range"; private static final String TARGET = "target"; private static final String TARGET_RANGE = "target_range"; - + private static final String PROPERTY_NAMES[] = { - PROTOCOL, SOURCE, SOURCE_RANGE, - TARGET, TARGET_RANGE + PROTOCOL, SOURCE, SOURCE_RANGE, + TARGET, TARGET_RANGE }; - + // todo(TBD) May want to make this a subclass of DataType // and change init method to set PortSpec's properties public PortSpec() { - + } // The following additional requirements MUST be tested: @@ -59,47 +59,44 @@ public class PortSpec { // 3) A valid PortSpec MUST have a value for the target property that is // within the numeric range specified by the property target_range // when target_range is specified. - public static void validateAdditionalReq(Object _properties, - String propName, - LinkedHashMap custom_def) { - + public static void validateAdditionalReq(Object _properties, + String propName, + LinkedHashMap custom_def) { + try { - LinkedHashMap properties = (LinkedHashMap)_properties; + LinkedHashMap properties = (LinkedHashMap) _properties; Object source = properties.get(PortSpec.SOURCE); Object sourceRange = properties.get(PortSpec.SOURCE_RANGE); Object target = properties.get(PortSpec.TARGET); Object targetRange = properties.get(PortSpec.TARGET_RANGE); // verify one of the specified values is set - if(source == null && sourceRange == null && - target == null && targetRange == null) { + if (source == null && sourceRange == null && + target == null && targetRange == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE129", String.format( - "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", - TYPE_URI))); + "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", + TYPE_URI))); } // Validate source value is in specified range - if(source != null && sourceRange != null) { - ValidateUtils.validateValueInRange(source,sourceRange,SOURCE); - } - else { + if (source != null && sourceRange != null) { + ValidateUtils.validateValueInRange(source, sourceRange, SOURCE); + } else { DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE); portdef.validate(); } // Validate target value is in specified range - if(target != null && targetRange != null) { - ValidateUtils.validateValueInRange(target,targetRange,SOURCE); - } - else { + if (target != null && targetRange != null) { + ValidateUtils.validateValueInRange(target, targetRange, SOURCE); + } else { DataEntity portdef = new DataEntity("PortDef", source, null, TARGET); portdef.validate(); } - } - catch(Exception e) { + } catch (Exception e) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE130", String.format( - "ValueError: \"%s\" do not meet requirements for type \"%s\"", - _properties.toString(),SHORTNAME))); + "ValueError: \"%s\" do not meet requirements for type \"%s\"", + _properties.toString(), SHORTNAME))); } - } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java index 6e1fe61..484d17e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,12 +27,12 @@ import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class PropertyDef { - - private static final String PROPERTY_KEYNAME_DEFAULT = "default"; - private static final String PROPERTY_KEYNAME_REQUIRED = "required"; - private static final String PROPERTY_KEYNAME_STATUS = "status"; - private static final String VALID_PROPERTY_KEYNAMES[] = { - PROPERTY_KEYNAME_DEFAULT, + + private static final String PROPERTY_KEYNAME_DEFAULT = "default"; + private static final String PROPERTY_KEYNAME_REQUIRED = "required"; + private static final String PROPERTY_KEYNAME_STATUS = "status"; + private static final String VALID_PROPERTY_KEYNAMES[] = { + PROPERTY_KEYNAME_DEFAULT, PROPERTY_KEYNAME_REQUIRED, PROPERTY_KEYNAME_STATUS}; @@ -41,122 +41,120 @@ public class PropertyDef { private static final String VALID_REQUIRED_VALUES[] = {"true", "false"}; private static final String PROPERTY_STATUS_SUPPORTED = "supported"; - private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental"; - private static final String VALID_STATUS_VALUES[] = { - PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL}; - - private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED; - - private String name; - private Object value; - private LinkedHashMap schema; - private String _status; - private boolean _required; - - public PropertyDef(String pdName, Object pdValue, - LinkedHashMap pdSchema) { - name = pdName; - value = pdValue; - schema = pdSchema; + private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental"; + private static final String VALID_STATUS_VALUES[] = { + PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL}; + + private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED; + + private String name; + private Object value; + private LinkedHashMap schema; + private String _status; + private boolean _required; + + public PropertyDef(String pdName, Object pdValue, + LinkedHashMap pdSchema) { + name = pdName; + value = pdValue; + schema = pdSchema; _status = PROPERTY_STATUS_DEFAULT; _required = PROPERTY_REQUIRED_DEFAULT; - if(schema != null) { - // Validate required 'type' property exists - if(schema.get("type") == null) { - //msg = (_('Schema definition of "%(pname)s" must have a "type" ' - // 'attribute.') % dict(pname=self.name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); - } - _loadRequiredAttrFromSchema(); - _loadStatusAttrFromSchema(); + if (schema != null) { + // Validate required 'type' property exists + if (schema.get("type") == null) { + //msg = (_('Schema definition of "%(pname)s" must have a "type" ' + // 'attribute.') % dict(pname=self.name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", name))); + } + _loadRequiredAttrFromSchema(); + _loadStatusAttrFromSchema(); + } + } + + public Object getDefault() { + if (schema != null) { + for 
(Map.Entry me : schema.entrySet()) { + if (me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) { + return me.getValue(); + } + } } - } - - public Object getDefault() { - if(schema != null) { - for(Map.Entry me: schema.entrySet()) { - if(me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) { - return me.getValue(); - } - } - } - return null; - } - - public boolean isRequired() { - return _required; - } - - private void _loadRequiredAttrFromSchema() { + return null; + } + + public boolean isRequired() { + return _required; + } + + private void _loadRequiredAttrFromSchema() { // IF 'required' keyname exists verify it's a boolean, // if so override default - Object val = schema.get(PROPERTY_KEYNAME_REQUIRED); - if(val != null) { - if(val instanceof Boolean) { - _required = (boolean)val; - } - else { + Object val = schema.get(PROPERTY_KEYNAME_REQUIRED); + if (val != null) { + if (val instanceof Boolean) { + _required = (boolean) val; + } else { //valid_values = ', '.join(self.VALID_REQUIRED_VALUES) //attr = self.PROPERTY_KEYNAME_REQUIRED //TOSCAException.generate_inv_schema_property_error(self, // attr, // value, // valid_values) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE132", String.format( - "Schema definition of \"%s\" has \"required\" attribute with an invalid value", - name))); - } - } - } - - public String getStatus() { - return _status; - } - - private void _loadStatusAttrFromSchema() { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE132", String.format( + "Schema definition of \"%s\" has \"required\" attribute with an invalid value", + name))); + } + } + } + + public String getStatus() { + return _status; + } + + private void _loadStatusAttrFromSchema() { // IF 'status' keyname exists verify it's a boolean, // if so override default - String sts = (String)schema.get(PROPERTY_KEYNAME_STATUS); - if(sts != null) { - boolean bFound = false; - for(String vsv: VALID_STATUS_VALUES) { - if(vsv.equals(sts)) { - bFound = true; - break; - } - } - if(bFound) { - _status = sts; - } - else { + String sts = (String) schema.get(PROPERTY_KEYNAME_STATUS); + if (sts != null) { + boolean bFound = false; + for (String vsv : VALID_STATUS_VALUES) { + if (vsv.equals(sts)) { + bFound = true; + break; + } + } + if (bFound) { + _status = sts; + } else { //valid_values = ', '.join(self.VALID_STATUS_VALUES) //attr = self.PROPERTY_KEYNAME_STATUS //TOSCAException.generate_inv_schema_property_error(self, // attr, // value, // valid_values) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format( - "Schema definition of \"%s\" has \"status\" attribute with an invalid value", - name))); - } - } - } - - public String getName() { - return name; - } - - public LinkedHashMap getSchema() { - return schema; - } - - public Object getPDValue() { - // there's getValue in EntityType... - return value; - } - + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format( + "Schema definition of \"%s\" has \"status\" attribute with an invalid value", + name))); + } + } + } + + public String getName() { + return name; + } + + public LinkedHashMap getSchema() { + return schema; + } + + public Object getPDValue() { + // there's getValue in EntityType... 
+ return value; + } + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java index 17f420d..4c39ec2 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,56 +27,56 @@ import java.util.LinkedHashMap; public class RelationshipType extends StatefulEntityType { - private static final String DERIVED_FROM = "derived_from"; - private static final String VALID_TARGET_TYPES = "valid_target_types"; - private static final String INTERFACES = "interfaces"; - private static final String ATTRIBUTES = "attributes"; - private static final String PROPERTIES = "properties"; - private static final String DESCRIPTION = "description"; - private static final String VERSION = "version"; - private static final String CREDENTIAL = "credential"; - - private static final String SECTIONS[] = { - DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, - ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL}; - - private String capabilityName; - private LinkedHashMap customDef; - - public RelationshipType(String _type, String _capabilityName, LinkedHashMap _customDef) { - super(_type,RELATIONSHIP_PREFIX,_customDef); - capabilityName = _capabilityName; - customDef = _customDef; - } - - public RelationshipType getParentType() { + private static final String DERIVED_FROM = "derived_from"; + private static final String VALID_TARGET_TYPES = "valid_target_types"; + private static final String INTERFACES = "interfaces"; + private static final String ATTRIBUTES = "attributes"; + private static final String PROPERTIES = "properties"; + private static final String DESCRIPTION = "description"; + private static final String VERSION = "version"; + private static final String CREDENTIAL = "credential"; + + private static final String[] SECTIONS = { + DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, + ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL}; + + private String capabilityName; + private LinkedHashMap customDef; + + public RelationshipType(String type, String capabilityName, LinkedHashMap customDef) { + super(type, RELATIONSHIP_PREFIX, customDef); + this.capabilityName = capabilityName; + this.customDef = customDef; + } + + public RelationshipType getParentType() { // Return a relationship this reletionship is derived from.''' String prel = derivedFrom(defs); - if(prel != null) { - return new RelationshipType(prel,null,customDef); + if (prel != null) { + return new RelationshipType(prel, null, customDef); } return null; - } - - public Object getValidTargetTypes() { - return entityValue(defs,"valid_target_types"); - } - - private void _validateKeys() { - for(String key: defs.keySet()) { - boolean bFound = false; - for(int i=0; i< SECTIONS.length; i++) { - if(key.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if(!bFound) { + } + + public Object getValidTargetTypes() { + return entityValue(defs, "valid_target_types"); + } + + 
private void validateKeys() { + for (String key : defs.keySet()) { + boolean bFound = false; + for (String section : SECTIONS) { + if (key.equals(section)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE133", String.format( - "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"",type,key))); - } + "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"", type, key))); + } } - } + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java index eeaa07c..1eaa8a0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,147 +20,152 @@ package org.onap.sdc.toscaparser.api.elements; -import java.util.HashMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.onap.sdc.toscaparser.api.utils.ValidateUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.HashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + public abstract class ScalarUnit { - private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName()); + private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName()); + + private static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + private static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - private static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; - private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; - private static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - - public static final String SCALAR_UNIT_TYPES[] = { + public static final String[] SCALAR_UNIT_TYPES = { SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME - }; - - private Object value; - protected HashMap SCALAR_UNIT_DICT; - protected String SCALAR_UNIT_DEFAULT; - - public ScalarUnit(Object _value) { - value = _value; - SCALAR_UNIT_DICT = new HashMap<>(); - SCALAR_UNIT_DEFAULT = ""; - } - - - private String _checkUnitInScalarStandardUnits(String inputUnit) { + }; + + private Object value; + private HashMap scalarUnitDict; + private String scalarUnitDefault; + + public ScalarUnit(Object value) { + this.value = value; + scalarUnitDict = new HashMap<>(); + scalarUnitDefault = ""; + } + + void putToScalarUnitDict(String key, Object value) { + scalarUnitDict.put(key, value); + } + + void setScalarUnitDefault(String scalarUnitDefault) { + this.scalarUnitDefault = scalarUnitDefault; + } + + private String checkUnitInScalarStandardUnits(String inputUnit) { // Check whether the input unit is following specified standard - + // If unit is not following specified standard, convert it to 
standard // unit after displaying a warning message. - - if(SCALAR_UNIT_DICT.get(inputUnit) != null) { - return inputUnit; - } - else { - for(String key: SCALAR_UNIT_DICT.keySet()) { - if(key.toUpperCase().equals(inputUnit.toUpperCase())) { - log.debug("ScalarUnit - _checkUnitInScalarStandardUnits - \n" + - "The unit {} does not follow scalar unit standards\n" + - "using {} instead", - inputUnit, key); - return key; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format( - "'The unit \"%s\" is not valid. Valid units are \n%s", - inputUnit,SCALAR_UNIT_DICT.keySet().toString()))); + + if (scalarUnitDict.get(inputUnit) != null) { + return inputUnit; + } else { + for (String key : scalarUnitDict.keySet()) { + if (key.toUpperCase().equals(inputUnit.toUpperCase())) { + log.debug("ScalarUnit - checkUnitInScalarStandardUnits - \n" + + "The unit {} does not follow scalar unit standards\n" + + "using {} instead", + inputUnit, key); + return key; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format( + "'The unit \"%s\" is not valid. Valid units are \n%s", + inputUnit, scalarUnitDict.keySet().toString()))); return inputUnit; - } - } - - public Object validateScalarUnit() { - Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); - Matcher matcher = pattern.matcher(value.toString()); - if(matcher.find()) { - ValidateUtils.strToNum(matcher.group(1)); - String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); - value = matcher.group(1) + " " + scalarUnit; - } - else { + } + } + + public Object validateScalarUnit() { + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if (matcher.find()) { + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + } else { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE134", String.format( - "ValueError: \"%s\" is not a valid scalar-unit",value.toString()))); - } - return value; - } - - public double getNumFromScalarUnit(String unit) { - if(unit != null) { - unit = _checkUnitInScalarStandardUnits(unit); - } - else { - unit = SCALAR_UNIT_DEFAULT; - } - Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); - Matcher matcher = pattern.matcher(value.toString()); - if(matcher.find()) { - ValidateUtils.strToNum(matcher.group(1)); - String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); - value = matcher.group(1) + " " + scalarUnit; - Object on1 = ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0; - Object on2 = SCALAR_UNIT_DICT.get(matcher.group(2)) != null ? SCALAR_UNIT_DICT.get(matcher.group(2)) : 0; - Object on3 = SCALAR_UNIT_DICT.get(unit) != null ? SCALAR_UNIT_DICT.get(unit) : 0; - - Double n1 = new Double(on1.toString()); - Double n2 = new Double(on2.toString()); - Double n3 = new Double(on3.toString()); - double converted = n1 * n2 / n3; - if(Math.abs(converted - Math.round(converted)) < 0.0000000000001 ) { - converted = Math.round(converted); - } - return converted; - } - return 0l; //??? 
- } - - protected static HashMap scalarunitMapping = _getScalarunitMappings(); - - private static HashMap _getScalarunitMappings() { - HashMap map = new HashMap<>(); - map.put(SCALAR_UNIT_FREQUENCY,"ScalarUnitFrequency"); - map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize"); - map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time"); - return map; - } - - public static ScalarUnit getScalarunitClass(String type,Object val) { - if(type.equals(SCALAR_UNIT_SIZE)) { - return new ScalarUnitSize(val); - } - else if(type.equals(SCALAR_UNIT_TIME)) { - return new ScalarUnitTime(val); - } - else if(type.equals(SCALAR_UNIT_FREQUENCY)) { - return new ScalarUnitFrequency(val); - } - return null; - } - - public static double getScalarunitValue(String type, Object value, String unit) { - if(type.equals(SCALAR_UNIT_SIZE)) { - return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit); - } - if(type.equals(SCALAR_UNIT_TIME)) { - return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit); - } - if(type.equals(SCALAR_UNIT_FREQUENCY)) { - return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); - } + "ValueError: \"%s\" is not a valid scalar-unit", value.toString()))); + } + return value; + } + + public double getNumFromScalarUnit(String unit) { + if (unit != null) { + unit = checkUnitInScalarStandardUnits(unit); + } else { + unit = scalarUnitDefault; + } + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if (matcher.find()) { + final double minimalNum = 0.0000000000001; + + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + Object on1 = ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0; + Object on2 = scalarUnitDict.get(matcher.group(2)) != null ? scalarUnitDict.get(matcher.group(2)) : 0; + Object on3 = scalarUnitDict.get(unit) != null ? 
scalarUnitDict.get(unit) : 0; + + Double n1 = new Double(on1.toString()); + Double n2 = new Double(on2.toString()); + Double n3 = new Double(on3.toString()); + double converted = n1 * n2 / n3; + + if (Math.abs(converted - Math.round(converted)) < minimalNum) { + converted = Math.round(converted); + } + return converted; + } + return 0.0; + } + + private static HashMap scalarUnitMapping = getScalarUnitMappings(); + + private static HashMap getScalarUnitMappings() { + HashMap map = new HashMap<>(); + map.put(SCALAR_UNIT_FREQUENCY, "ScalarUnitFrequency"); + map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize"); + map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time"); + return map; + } + + public static ScalarUnit getScalarunitClass(String type, Object val) { + if (type.equals(SCALAR_UNIT_SIZE)) { + return new ScalarUnitSize(val); + } else if (type.equals(SCALAR_UNIT_TIME)) { + return new ScalarUnitTime(val); + } else if (type.equals(SCALAR_UNIT_FREQUENCY)) { + return new ScalarUnitFrequency(val); + } + return null; + } + + public static double getScalarunitValue(String type, Object value, String unit) { + if (type.equals(SCALAR_UNIT_SIZE)) { + return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit); + } + if (type.equals(SCALAR_UNIT_TIME)) { + return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit); + } + if (type.equals(SCALAR_UNIT_FREQUENCY)) { + return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); + } ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE135", String.format( - "TypeError: \"%s\" is not a valid scalar-unit type",type))); + "TypeError: \"%s\" is not a valid scalar-unit type", type))); return 0.0; - } - + } + } /*python @@ -190,10 +195,10 @@ class ScalarUnit(object): If unit is not following specified standard, convert it to standard unit after displaying a warning message. """ - if input_unit in self.SCALAR_UNIT_DICT.keys(): + if input_unit in self.scalarUnitDict.keys(): return input_unit else: - for key in self.SCALAR_UNIT_DICT.keys(): + for key in self.scalarUnitDict.keys(): if key.upper() == input_unit.upper(): log.warning(_('The unit "%(unit)s" does not follow ' 'scalar unit standards; using "%(key)s" ' @@ -203,7 +208,7 @@ class ScalarUnit(object): msg = (_('The unit "%(unit)s" is not valid. 
Valid units are ' '"%(valid_units)s".') % {'unit': input_unit, - 'valid_units': sorted(self.SCALAR_UNIT_DICT.keys())}) + 'valid_units': sorted(self.scalarUnitDict.keys())}) ValidationIssueCollector.appendException(ValueError(msg)) def validate_scalar_unit(self): @@ -224,14 +229,14 @@ class ScalarUnit(object): if unit: unit = self._check_unit_in_scalar_standard_units(unit) else: - unit = self.SCALAR_UNIT_DEFAULT + unit = self.scalarUnitDefault self.validate_scalar_unit() regex = re.compile('([0-9.]+)\s*(\w+)') result = regex.match(str(self.value)).groups() converted = (float(validateutils.str_to_num(result[0])) - * self.SCALAR_UNIT_DICT[result[1]] - / self.SCALAR_UNIT_DICT[unit]) + * self.scalarUnitDict[result[1]] + / self.scalarUnitDict[unit]) if converted - int(converted) < 0.0000000000001: converted = int(converted) return converted @@ -239,8 +244,8 @@ class ScalarUnit(object): class ScalarUnit_Size(ScalarUnit): - SCALAR_UNIT_DEFAULT = 'B' - SCALAR_UNIT_DICT = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000, + scalarUnitDefault = 'B' + scalarUnitDict = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000, 'MiB': 1048576, 'GB': 1000000000, 'GiB': 1073741824, 'TB': 1000000000000, 'TiB': 1099511627776} @@ -248,15 +253,15 @@ class ScalarUnit_Size(ScalarUnit): class ScalarUnit_Time(ScalarUnit): - SCALAR_UNIT_DEFAULT = 'ms' - SCALAR_UNIT_DICT = {'d': 86400, 'h': 3600, 'm': 60, 's': 1, + scalarUnitDefault = 'ms' + scalarUnitDict = {'d': 86400, 'h': 3600, 'm': 60, 's': 1, 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001} class ScalarUnit_Frequency(ScalarUnit): - SCALAR_UNIT_DEFAULT = 'GHz' - SCALAR_UNIT_DICT = {'Hz': 1, 'kHz': 1000, + scalarUnitDefault = 'GHz' + scalarUnitDict = {'Hz': 1, 'kHz': 1000, 'MHz': 1000000, 'GHz': 1000000000} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java index 59664ca..ed10da9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
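The conversion performed by getNumFromScalarUnit above is value * factor(sourceUnit) / factor(targetUnit). A usage sketch of the static helper shown in ScalarUnit; the expected results assume the unit factors listed above (GB = 10^9, MB = 10^6, GHz = 10^9, MHz = 10^6) and a normal parser run where the thread-local issue collector is initialised:

import org.onap.sdc.toscaparser.api.elements.ScalarUnit;

// Usage sketch for ScalarUnit.getScalarunitValue(type, value, unit).
public class ScalarUnitUsageSketch {
    public static void main(String[] args) {
        // "2 GB" to MB: 2 * 1_000_000_000 / 1_000_000 = 2000.0
        System.out.println(ScalarUnit.getScalarunitValue("scalar-unit.size", "2 GB", "MB"));

        // "2.4 GHz" to MHz: 2.4 * 1_000_000_000 / 1_000_000 = 2400.0
        System.out.println(ScalarUnit.getScalarunitValue("scalar-unit.frequency", "2.4 GHz", "MHz"));
    }
}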
@@ -22,13 +22,18 @@ package org.onap.sdc.toscaparser.api.elements; public class ScalarUnitFrequency extends ScalarUnit { - public ScalarUnitFrequency(Object value) { - super(value); - SCALAR_UNIT_DEFAULT = "GHz"; - SCALAR_UNIT_DICT.put("Hz",1L); - SCALAR_UNIT_DICT.put("kHz",1000L); - SCALAR_UNIT_DICT.put("MHz",1000000L); - SCALAR_UNIT_DICT.put("GHz",1000000000L); - } + private static final Long HZ = 1L; + private static final Long KHZ = 1000L; + private static final Long MHZ = 1000000L; + private static final Long GHZ = 1000000000L; + + public ScalarUnitFrequency(Object value) { + super(value); + setScalarUnitDefault("GHz"); + putToScalarUnitDict("Hz", HZ); + putToScalarUnitDict("kHz", KHZ); + putToScalarUnitDict("MHz", MHZ); + putToScalarUnitDict("GHz", GHZ); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java index d29d8a2..78687a1 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,20 +20,24 @@ package org.onap.sdc.toscaparser.api.elements; +import org.onap.sdc.toscaparser.api.elements.enums.FileSize; + public class ScalarUnitSize extends ScalarUnit { - public ScalarUnitSize(Object value) { - super(value); - - SCALAR_UNIT_DEFAULT = "B"; - SCALAR_UNIT_DICT.put("B",1L); - SCALAR_UNIT_DICT.put("kB",1000L); - SCALAR_UNIT_DICT.put("kiB",1024L); - SCALAR_UNIT_DICT.put("MB",1000000L); - SCALAR_UNIT_DICT.put("MiB",1048576L); - SCALAR_UNIT_DICT.put("GB",1000000000L); - SCALAR_UNIT_DICT.put("GiB",1073741824L); - SCALAR_UNIT_DICT.put("TB",1000000000000L); - SCALAR_UNIT_DICT.put("TiB",1099511627776L); - } + + + public ScalarUnitSize(Object value) { + super(value); + + setScalarUnitDefault("B"); + putToScalarUnitDict("B", FileSize.B); + putToScalarUnitDict("kB", FileSize.KB); + putToScalarUnitDict("MB", FileSize.MB); + putToScalarUnitDict("GB", FileSize.GB); + putToScalarUnitDict("TB", FileSize.TB); + putToScalarUnitDict("kiB", FileSize.KIB); + putToScalarUnitDict("MiB", FileSize.MIB); + putToScalarUnitDict("GiB", FileSize.GIB); + putToScalarUnitDict("TiB", FileSize.TIB); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java index 45848af..8d2c13e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
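The refactor above replaces direct writes to the unit fields with putToScalarUnitDict(...) and setScalarUnitDefault(...). A hypothetical subclass, not part of this patch, showing how that registration pattern would be used; it assumes the class lives in the same package, since the two helpers are package-private:

package org.onap.sdc.toscaparser.api.elements;

// Hypothetical scalar unit for bit rates, illustrating the new registration helpers only.
public class ScalarUnitBitRate extends ScalarUnit {

    public ScalarUnitBitRate(Object value) {
        super(value);
        setScalarUnitDefault("bps");
        putToScalarUnitDict("bps", 1L);
        putToScalarUnitDict("Kbps", 1000L);
        putToScalarUnitDict("Mbps", 1000000L);
        putToScalarUnitDict("Gbps", 1000000000L);
    }
}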
@@ -22,16 +22,16 @@ package org.onap.sdc.toscaparser.api.elements; public class ScalarUnitTime extends ScalarUnit { - public ScalarUnitTime(Object value) { - super(value); - SCALAR_UNIT_DEFAULT = "ms"; - SCALAR_UNIT_DICT.put("d",86400L); - SCALAR_UNIT_DICT.put("h",3600L); - SCALAR_UNIT_DICT.put("m",60L); - SCALAR_UNIT_DICT.put("s",1L); - SCALAR_UNIT_DICT.put("ms",0.001); - SCALAR_UNIT_DICT.put("us",0.000001); - SCALAR_UNIT_DICT.put("ns",0.000000001); - } + public ScalarUnitTime(Object value) { + super(value); + setScalarUnitDefault("ms"); + putToScalarUnitDict("d", 86400L); + putToScalarUnitDict("h", 3600L); + putToScalarUnitDict("m", 60L); + putToScalarUnitDict("s", 1L); + putToScalarUnitDict("ms", 0.001); + putToScalarUnitDict("us", 0.000001); + putToScalarUnitDict("ns", 0.000000001); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java index ef9159f..b710dda 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,140 +20,136 @@ package org.onap.sdc.toscaparser.api.elements; +import org.onap.sdc.toscaparser.api.UnsupportedType; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; -import org.onap.sdc.toscaparser.api.UnsupportedType; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class StatefulEntityType extends EntityType { // Class representing TOSCA states - public static final String interfacesNodeLifecycleOperations[] = { - "create", "configure", "start", "stop", "delete"}; + public static final String[] INTERFACE_NODE_LIFECYCLE_OPERATIONS = { + "create", "configure", "start", "stop", "delete"}; - public static final String interfacesRelationshipConfigureOperations[] = { - "post_configure_source", "post_configure_target", "add_target", "remove_target"}; + public static final String[] INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS = { + "post_configure_source", "post_configure_target", "add_target", "remove_target"}; public StatefulEntityType() { - // void constructor for subclasses that don't want super + // void constructor for subclasses that don't want super } - - @SuppressWarnings("unchecked") - public StatefulEntityType(String entityType, String prefix, LinkedHashMap customDef) { + + @SuppressWarnings("unchecked") + public StatefulEntityType(String entityType, String prefix, LinkedHashMap customDef) { String entireEntityType = entityType; - if(UnsupportedType.validateType(entireEntityType)) { + if (UnsupportedType.validateType(entireEntityType)) { defs = null; - } - else { - if(entityType.startsWith(TOSCA + ":")) { - entityType = entityType.substring(TOSCA.length()+1); + } else { + if (entityType.startsWith(TOSCA + ":")) { + entityType = entityType.substring(TOSCA.length() + 1); entireEntityType = prefix + 
entityType; } - if(!entityType.startsWith(TOSCA)) { + if (!entityType.startsWith(TOSCA)) { entireEntityType = prefix + entityType; } - if(TOSCA_DEF.get(entireEntityType) != null) { - defs = (LinkedHashMap )TOSCA_DEF.get(entireEntityType); + if (TOSCA_DEF.get(entireEntityType) != null) { + defs = (LinkedHashMap) TOSCA_DEF.get(entireEntityType); entityType = entireEntityType; - } - else if(customDef != null && customDef.get(entityType) != null) { - defs = (LinkedHashMap )customDef.get(entityType); - } - else{ + } else if (customDef != null && customDef.get(entityType) != null) { + defs = (LinkedHashMap) customDef.get(entityType); + } else { defs = null; - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format( - "InvalidTypeError: \"%s\" is not a valid type",entityType))); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format( + "InvalidTypeError: \"%s\" is not a valid type", entityType))); } } type = entityType; - } - - @SuppressWarnings("unchecked") - public ArrayList getPropertiesDefObjects() { - // Return a list of property definition objects - ArrayList properties = new ArrayList(); - LinkedHashMap props = (LinkedHashMap)getDefinition(PROPERTIES); - if(props != null) { - for(Map.Entry me: props.entrySet()) { - String pdname = me.getKey(); - Object to = me.getValue(); - if(to == null || !(to instanceof LinkedHashMap)) { - String s = to == null ? "null" : to.getClass().getSimpleName(); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format( - "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)",pdname,s))); - continue; - } - LinkedHashMap pdschema = (LinkedHashMap)to; - properties.add(new PropertyDef(pdname,null,pdschema)); - } - } - return properties; - } - - public LinkedHashMap getPropertiesDef() { - LinkedHashMap pds = new LinkedHashMap(); - for(PropertyDef pd: getPropertiesDefObjects()) { - pds.put(pd.getName(),pd); - } - return pds; - } - - public PropertyDef getPropertyDefValue(String name) { - // Return the property definition associated with a given name - PropertyDef pd = null; - LinkedHashMap propsDef = getPropertiesDef(); - if(propsDef != null) { - pd = propsDef.get(name); - } - return pd; - } - - public ArrayList getAttributesDefObjects() { - // Return a list of attribute definition objects - @SuppressWarnings("unchecked") - LinkedHashMap attrs = (LinkedHashMap)getValue(ATTRIBUTES,null,true); - ArrayList ads = new ArrayList<>(); - if(attrs != null) { - for(Map.Entry me: attrs.entrySet()) { - String attr = me.getKey(); - @SuppressWarnings("unchecked") - LinkedHashMap adschema = (LinkedHashMap)me.getValue(); - ads.add(new AttributeDef(attr,null,adschema)); - } - } - return ads; - } - - public LinkedHashMap getAttributesDef() { - // Return a dictionary of attribute definition name-object pairs - - LinkedHashMap ads = new LinkedHashMap<>(); - for(AttributeDef ado: getAttributesDefObjects()) { - ads.put(((AttributeDef)ado).getName(),ado); - } - return ads; - } - - public AttributeDef getAttributeDefValue(String name) { - // Return the attribute definition associated with a given name - AttributeDef ad = null; - LinkedHashMap attrsDef = getAttributesDef(); - if(attrsDef != null) { - ad = attrsDef.get(name); - } - return ad; - } - - public String getType() { - return type; - } - } + } + + @SuppressWarnings("unchecked") + public ArrayList getPropertiesDefObjects() { + // Return a list of property definition 
objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = (LinkedHashMap) getDefinition(PROPERTIES); + if (props != null) { + for (Map.Entry me : props.entrySet()) { + String pdname = me.getKey(); + Object to = me.getValue(); + if (to == null || !(to instanceof LinkedHashMap)) { + String s = to == null ? "null" : to.getClass().getSimpleName(); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format( + "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)", pdname, s))); + continue; + } + LinkedHashMap pdschema = (LinkedHashMap) to; + properties.add(new PropertyDef(pdname, null, pdschema)); + } + } + return properties; + } + + public LinkedHashMap getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap(); + for (PropertyDef pd : getPropertiesDefObjects()) { + pds.put(pd.getName(), pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String name) { + // Return the property definition associated with a given name + PropertyDef pd = null; + LinkedHashMap propsDef = getPropertiesDef(); + if (propsDef != null) { + pd = propsDef.get(name); + } + return pd; + } + + public ArrayList getAttributesDefObjects() { + // Return a list of attribute definition objects + @SuppressWarnings("unchecked") + LinkedHashMap attrs = (LinkedHashMap) getValue(ATTRIBUTES, null, true); + ArrayList ads = new ArrayList<>(); + if (attrs != null) { + for (Map.Entry me : attrs.entrySet()) { + String attr = me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap adschema = (LinkedHashMap) me.getValue(); + ads.add(new AttributeDef(attr, null, adschema)); + } + } + return ads; + } + + public LinkedHashMap getAttributesDef() { + // Return a dictionary of attribute definition name-object pairs + + LinkedHashMap ads = new LinkedHashMap<>(); + for (AttributeDef ado : getAttributesDefObjects()) { + ads.put(((AttributeDef) ado).getName(), ado); + } + return ads; + } + + public AttributeDef getAttributeDefValue(String name) { + // Return the attribute definition associated with a given name + AttributeDef ad = null; + LinkedHashMap attrsDef = getAttributesDef(); + if (attrsDef != null) { + ad = attrsDef.get(name); + } + return ad; + } + + public String getType() { + return type; + } +} /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java index 9321064..18dd5ca 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
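A usage sketch for the property-definition accessors in StatefulEntityType above, run against a built-in node type. It assumes the generic signatures the angle-bracket-stripped listing implies (ArrayList<PropertyDef>, LinkedHashMap<String, PropertyDef>), that the bundled normative TOSCA definitions load as usual, and that the thread-local issue collector is initialised as in a normal parser run:

import org.onap.sdc.toscaparser.api.elements.NodeType;
import org.onap.sdc.toscaparser.api.elements.PropertyDef;

// Usage sketch: list the property definitions of tosca.nodes.Compute.
public class PropertyDefListingSketch {
    public static void main(String[] args) {
        NodeType compute = new NodeType("tosca.nodes.Compute", null);
        for (PropertyDef pd : compute.getPropertiesDefObjects()) {
            System.out.println(pd.getName()
                    + " required=" + pd.isRequired()
                    + " default=" + pd.getDefault());
        }
    }
}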
@@ -30,25 +30,25 @@ import org.onap.sdc.toscaparser.api.extensions.ExtTools; public class TypeValidation { - private static final String DEFINITION_VERSION = "tosca_definitions_version"; - private static final String DESCRIPTION = "description"; - private static final String IMPORTS = "imports"; - private static final String DSL_DEFINITIONS = "dsl_definitions"; - private static final String NODE_TYPES = "node_types"; - private static final String REPOSITORIES = "repositories"; - private static final String DATA_TYPES = "data_types"; - private static final String ARTIFACT_TYPES = "artifact_types"; - private static final String GROUP_TYPES = "group_types"; - private static final String RELATIONSHIP_TYPES = "relationship_types"; - private static final String CAPABILITY_TYPES = "capability_types"; - private static final String INTERFACE_TYPES = "interface_types"; - private static final String POLICY_TYPES = "policy_types"; - private static final String TOPOLOGY_TEMPLATE = "topology_template"; - //Pavel - private static final String METADATA = "metadata"; - - private String ALLOWED_TYPE_SECTIONS[] = { - DEFINITION_VERSION, DESCRIPTION, IMPORTS, + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String REPOSITORIES = "repositories"; + private static final String DATA_TYPES = "data_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String GROUP_TYPES = "group_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private static final String TOPOLOGY_TEMPLATE = "topology_template"; + //Pavel + private static final String METADATA = "metadata"; + + private String ALLOWED_TYPE_SECTIONS[] = { + DEFINITION_VERSION, DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, RELATIONSHIP_TYPES, CAPABILITY_TYPES, @@ -57,65 +57,65 @@ public class TypeValidation { }; private static ArrayList VALID_TEMPLATE_VERSIONS = _getVTV(); - + private static ArrayList _getVTV() { - ArrayList vtv = new ArrayList<>(); - vtv.add("tosca_simple_yaml_1_0"); - vtv.add("tosca_simple_yaml_1_1"); - ExtTools exttools = new ExtTools(); + ArrayList vtv = new ArrayList<>(); + vtv.add("tosca_simple_yaml_1_0"); + vtv.add("tosca_simple_yaml_1_1"); + ExtTools exttools = new ExtTools(); vtv.addAll(exttools.getVersions()); return vtv; } - + //private LinkedHashMap customTypes; private Object importDef; //private String version; - - public TypeValidation(LinkedHashMap _customTypes, - Object _importDef) { + + public TypeValidation(LinkedHashMap _customTypes, + Object _importDef) { importDef = _importDef; _validateTypeKeys(_customTypes); } - - private void _validateTypeKeys(LinkedHashMap customTypes) { - - String sVersion = (String)customTypes.get(DEFINITION_VERSION); - if(sVersion != null) { - _validateTypeVersion(sVersion); + + private void _validateTypeKeys(LinkedHashMap customTypes) { + + String sVersion = (String) customTypes.get(DEFINITION_VERSION); + if (sVersion != null) { + _validateTypeVersion(sVersion); //version = sVersion; } - 
for(String name: customTypes.keySet()) { - boolean bFound = false; - for(String ats: ALLOWED_TYPE_SECTIONS) { - if(name.equals(ats)) { - bFound = true; - break; - } - } - if(!bFound) { + for (String name : customTypes.keySet()) { + boolean bFound = false; + for (String ats : ALLOWED_TYPE_SECTIONS) { + if (name.equals(ats)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE138", String.format( - "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", - importDef.toString(),name))); - } + "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", + importDef.toString(), name))); + } } } - + private void _validateTypeVersion(String sVersion) { - boolean bFound = false; - String allowed = ""; - for(String atv: VALID_TEMPLATE_VERSIONS) { - allowed += "\"" + atv + "\" "; - if(sVersion.equals(atv)) { - bFound = true; - break; - } - } - if(!bFound) { + boolean bFound = false; + String allowed = ""; + for (String atv : VALID_TEMPLATE_VERSIONS) { + allowed += "\"" + atv + "\" "; + if (sVersion.equals(atv)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE139", String.format( - "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + - "Allowed versions: [%s]", - sVersion,importDef.toString(),allowed))); - } + "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + + "Allowed versions: [%s]", + sVersion, importDef.toString(), allowed))); + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java index 82f6718..dd77659 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
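The TypeValidation change above is behavior-preserving, so a small usage sketch may help: the class validates the top-level keys and the tosca_definitions_version of an imported type definition, reporting JE138/JE139 issues to the thread-local collector instead of throwing. The class name TypeValidationExample, the key bogus_section, and the import label are invented, and the sketch assumes the validation issue collector has already been initialized elsewhere in the parser.

import java.util.LinkedHashMap;

import org.onap.sdc.toscaparser.api.elements.TypeValidation;

public class TypeValidationExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> customTypes = new LinkedHashMap<>();
        customTypes.put("tosca_definitions_version", "tosca_simple_yaml_1_1"); // supported version
        customTypes.put("node_types", new LinkedHashMap<>());                  // allowed section
        customTypes.put("bogus_section", "value");                             // unknown key -> JE138

        // The second argument is only used to name the offending import in the issue message.
        new TypeValidation(customTypes, "my_import_definition");
    }
}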
@@ -20,175 +20,221 @@ package org.onap.sdc.toscaparser.api.elements.constraints; -import java.util.ArrayList; -import java.util.LinkedHashMap; - import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.elements.ScalarUnit; import org.onap.sdc.toscaparser.api.functions.Function; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + public abstract class Constraint { - - // Parent class for constraints for a Property or Input - - protected static final String EQUAL = "equal"; - protected static final String GREATER_THAN = "greater_than"; - protected static final String GREATER_OR_EQUAL = "greater_or_equal"; - protected static final String LESS_THAN = "less_than"; - protected static final String LESS_OR_EQUAL = "less_or_equal"; - protected static final String IN_RANGE = "in_range"; - protected static final String VALID_VALUES = "valid_values"; - protected static final String LENGTH = "length"; - protected static final String MIN_LENGTH = "min_length"; - protected static final String MAX_LENGTH = "max_length"; - protected static final String PATTERN = "pattern"; - - protected static final String CONSTRAINTS[] = { - EQUAL, GREATER_THAN,GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, - IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN}; - - @SuppressWarnings("unchecked") - public static Constraint factory(String constraintClass,String propname,String proptype,Object constraint) { - - // a factory for the different Constraint classes - // replaces Python's __new__() usage - - if(!(constraint instanceof LinkedHashMap) || - ((LinkedHashMap)constraint).size() != 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101", - "InvalidSchemaError: Invalid constraint schema " + constraint.toString())); - } - - if(constraintClass.equals(EQUAL)) { - return new Equal(propname,proptype,constraint); - } - else if(constraintClass.equals(GREATER_THAN)) { - return new GreaterThan(propname,proptype,constraint); - } - else if(constraintClass.equals(GREATER_OR_EQUAL)) { - return new GreaterOrEqual(propname,proptype,constraint); - } - else if(constraintClass.equals(LESS_THAN)) { - return new LessThan(propname,proptype,constraint); - } - else if(constraintClass.equals(LESS_OR_EQUAL)) { - return new LessOrEqual(propname,proptype,constraint); - } - else if(constraintClass.equals(IN_RANGE)) { - return new InRange(propname,proptype,constraint); - } - else if(constraintClass.equals(VALID_VALUES)) { - return new ValidValues(propname,proptype,constraint); - } - else if(constraintClass.equals(LENGTH)) { - return new Length(propname,proptype,constraint); - } - else if(constraintClass.equals(MIN_LENGTH)) { - return new MinLength(propname,proptype,constraint); - } - else if(constraintClass.equals(MAX_LENGTH)) { - return new MaxLength(propname,proptype,constraint); - } - else if(constraintClass.equals(PATTERN)) { - return new Pattern(propname,proptype,constraint); + + // Parent class for constraints for a Property or Input + + protected static final String EQUAL = "equal"; + protected static final String GREATER_THAN = "greater_than"; + protected static final String GREATER_OR_EQUAL = "greater_or_equal"; + protected static final String LESS_THAN = "less_than"; + protected static final String LESS_OR_EQUAL = "less_or_equal"; + protected static final String IN_RANGE = "in_range"; + protected static final String VALID_VALUES = "valid_values"; + 
protected static final String LENGTH = "length"; + protected static final String MIN_LENGTH = "min_length"; + protected static final String MAX_LENGTH = "max_length"; + protected static final String PATTERN = "pattern"; + + protected static final String[] CONSTRAINTS = { + EQUAL, GREATER_THAN, GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, + IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN}; + + @SuppressWarnings("unchecked") + public static Constraint factory(String constraintClass, String propname, String proptype, Object constraint) { + + // a factory for the different Constraint classes + // replaces Python's __new__() usage + + if (!(constraint instanceof LinkedHashMap) + || ((LinkedHashMap) constraint).size() != 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101", + "InvalidSchemaError: Invalid constraint schema " + constraint.toString())); } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format( - "InvalidSchemaError: Invalid property \"%s\"",constraintClass))); - return null; + + switch (constraintClass) { + case EQUAL: + return new Equal(propname, proptype, constraint); + case GREATER_THAN: + return new GreaterThan(propname, proptype, constraint); + case GREATER_OR_EQUAL: + return new GreaterOrEqual(propname, proptype, constraint); + case LESS_THAN: + return new LessThan(propname, proptype, constraint); + case LESS_OR_EQUAL: + return new LessOrEqual(propname, proptype, constraint); + case IN_RANGE: + return new InRange(propname, proptype, constraint); + case VALID_VALUES: + return new ValidValues(propname, proptype, constraint); + case LENGTH: + return new Length(propname, proptype, constraint); + case MIN_LENGTH: + return new MinLength(propname, proptype, constraint); + case MAX_LENGTH: + return new MaxLength(propname, proptype, constraint); + case PATTERN: + return new Pattern(propname, proptype, constraint); + default: + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format( + "InvalidSchemaError: Invalid property \"%s\"", constraintClass))); + return null; } - } - - protected String constraintKey = "TBD"; - protected ArrayList validTypes = new ArrayList<>(); - protected ArrayList validPropTypes = new ArrayList<>(); - - protected String propertyName; - protected String propertyType; - protected Object constraintValue; - protected Object constraintValueMsg; - protected Object valueMsg; - - @SuppressWarnings("unchecked") - public Constraint(String propname,String proptype,Object constraint) { - - _setValues(); - + } + + private String constraintKey = "TBD"; + protected ArrayList validTypes = new ArrayList<>(); + protected ArrayList validPropTypes = new ArrayList<>(); + + protected String propertyName; + private String propertyType; + protected Object constraintValue; + protected Object constraintValueMsg; + protected Object valueMsg; + + @SuppressWarnings("unchecked") + public Constraint(String propname, String proptype, Object constraint) { + + setValues(); + propertyName = propname; propertyType = proptype; - constraintValue = ((LinkedHashMap)constraint).get(constraintKey); + constraintValue = ((LinkedHashMap) constraint).get(constraintKey); constraintValueMsg = constraintValue; boolean bFound = false; - for(String s: ScalarUnit.SCALAR_UNIT_TYPES) { - if(s.equals(propertyType)) { - bFound = true; - break; - } + for (String s : ScalarUnit.SCALAR_UNIT_TYPES) { + if (s.equals(propertyType)) { + bFound = true; + 
break; + } } - if(bFound) { + if (bFound) { constraintValue = _getScalarUnitConstraintValue(); } // check if constraint is valid for property type bFound = false; - for(String s: validPropTypes) { - if(s.equals(propertyType)) { - bFound = true; - break; - } + for (String s : validPropTypes) { + if (s.equals(propertyType)) { + bFound = true; + break; + } } - if(!bFound) { + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE103", String.format( - "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"", - constraintKey,propertyType))); + "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"", + constraintKey, propertyType))); } - } - - @SuppressWarnings("unchecked") - private Object _getScalarUnitConstraintValue() { - // code differs from Python because of class creation - if(constraintValue instanceof ArrayList) { - ArrayList ret = new ArrayList<>(); - for(Object v: (ArrayList)constraintValue) { - ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,v); - ret.add(su.getNumFromScalarUnit(null)); - } - return ret; - } - else { - ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,constraintValue); - return su.getNumFromScalarUnit(null); - } - } - - public void validate(Object value) { - if (Function.isFunction(value)){ - //skipping constraints check for functions - return; - } - - valueMsg = value; + } + + public ArrayList getValidTypes() { + return validTypes; + } + + public void addValidTypes(List validTypes) { + this.validTypes.addAll(validTypes); + } + + public ArrayList getValidPropTypes() { + return validPropTypes; + } + + public String getPropertyType() { + return propertyType; + } + + public Object getConstraintValue() { + return constraintValue; + } + + public Object getConstraintValueMsg() { + return constraintValueMsg; + } + + public Object getValueMsg() { + return valueMsg; + } + + public void setConstraintKey(String constraintKey) { + this.constraintKey = constraintKey; + } + + public void setValidTypes(ArrayList validTypes) { + this.validTypes = validTypes; + } + + public void setValidPropTypes(ArrayList validPropTypes) { + this.validPropTypes = validPropTypes; + } + + public void setPropertyType(String propertyType) { + this.propertyType = propertyType; + } + + public void setConstraintValue(Object constraintValue) { + this.constraintValue = constraintValue; + } + + public void setConstraintValueMsg(Object constraintValueMsg) { + this.constraintValueMsg = constraintValueMsg; + } + + public void setValueMsg(Object valueMsg) { + this.valueMsg = valueMsg; + } + + @SuppressWarnings("unchecked") + private Object _getScalarUnitConstraintValue() { + // code differs from Python because of class creation + if (constraintValue instanceof ArrayList) { + ArrayList ret = new ArrayList<>(); + for (Object v : (ArrayList) constraintValue) { + ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, v); + ret.add(su.getNumFromScalarUnit(null)); + } + return ret; + } else { + ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, constraintValue); + return su.getNumFromScalarUnit(null); + } + } + + public void validate(Object value) { + if (Function.isFunction(value)) { + //skipping constraints check for functions + return; + } + + valueMsg = value; boolean bFound = false; - for(String s: ScalarUnit.SCALAR_UNIT_TYPES) { - if(s.equals(propertyType)) { - bFound = true; - break; - } + for (String s : ScalarUnit.SCALAR_UNIT_TYPES) { + if (s.equals(propertyType)) { + bFound = true; + break; + } } - 
if(bFound) { - value = ScalarUnit.getScalarunitValue(propertyType,value,null); + if (bFound) { + value = ScalarUnit.getScalarunitValue(propertyType, value, null); } - if(!_isValid(value)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + _errMsg(value))); + if (!isValid(value)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + errMsg(value))); } - } + } + + protected abstract boolean isValid(Object value); + + protected abstract void setValues(); - protected abstract boolean _isValid(Object value); - - protected abstract void _setValues(); + protected abstract String errMsg(Object value); - protected abstract String _errMsg(Object value); - } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java index 16e379a..f480099 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,36 +20,32 @@ package org.onap.sdc.toscaparser.api.elements.constraints; +import java.util.Arrays; + public class Equal extends Constraint { - protected void _setValues() { - - constraintKey = EQUAL; - - for(String s: Schema.PROPERTY_TYPES) { - validPropTypes.add(s); - } - - } - - public Equal(String name,String type,Object c) { - super(name,type,c); - - } - - protected boolean _isValid(Object val) { - // equality of objects is tricky so we're comparing - // the toString() representation - if(val.toString().equals(constraintValue.toString())) { - return true; - } - return false; - } - - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } + protected void setValues() { + + setConstraintKey(EQUAL); + validPropTypes.addAll(Arrays.asList(Schema.PROPERTY_TYPES)); + + } + + public Equal(String name, String type, Object c) { + super(name, type, c); + + } + + protected boolean isValid(Object val) { + // equality of objects is tricky so we're comparing + // the toString() representation + return val.toString().equals(constraintValue.toString()); + } + + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java index 4d6b1cf..0cb8f36 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
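Since the comment above only notes that the factory replaces Python's __new__() usage, a short usage sketch of the refactored Constraint entry points may be useful. It builds the single-entry map the factory expects and exercises validate(), whose failures are appended to the thread-local collector as JE008 issues rather than thrown. The class name ConstraintFactoryExample and the property name flavor are invented, and the sketch assumes the collector has been initialized elsewhere.

import java.util.LinkedHashMap;

import org.onap.sdc.toscaparser.api.elements.constraints.Constraint;
import org.onap.sdc.toscaparser.api.elements.constraints.Schema;

public class ConstraintFactoryExample {
    public static void main(String[] args) {
        // The factory expects a one-entry map keyed by the constraint name: {"equal": "m1.small"}.
        LinkedHashMap<String, Object> def = new LinkedHashMap<>();
        def.put("equal", "m1.small");

        Constraint c = Constraint.factory("equal", "flavor", Schema.STRING, def);
        if (c != null) {
            c.validate("m1.small"); // toString()-based comparison succeeds, no issue reported
            c.validate("m1.large"); // not equal -> a JE008 ValidationError issue is reported
        }
    }
}

Because Equal compares toString() representations, an Integer 1 and the String "1" are treated as equal, which is worth keeping in mind when property types are loose.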
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,73 +21,69 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.Date; - import org.onap.sdc.toscaparser.api.functions.Function; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; +import java.util.Date; + public class GreaterOrEqual extends Constraint { - // Constraint class for "greater_or_equal" - - // Constrains a property or parameter to a value greater than or equal - // to ('>=') the value declared. - - protected void _setValues() { - - constraintKey = GREATER_OR_EQUAL; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public GreaterOrEqual(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values")); - } - } - - - - @Override - protected boolean _isValid(Object value) { - if(Function.isFunction(value)) { - return true; - } - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return !((Date)value).before((Date)constraintValue); - } - return false; - } - // all others - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 >= n2; - } - - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } + // Constraint class for "greater_or_equal" + + // Constrains a property or parameter to a value greater than or equal + // to ('>=') the value declared. 
+ + protected void setValues() { + + setConstraintKey(GREATER_OR_EQUAL); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public GreaterOrEqual(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values")); + } + } + + + @Override + protected boolean isValid(Object value) { + if (Function.isFunction(value)) { + return true; + } + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return !((Date) value).before((Date) constraintValue); + } + return false; + } + // all others + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 >= n2; + } + + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java index c716821..b501907 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
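The comment above notes that timestamps are loaded as Date objects; a brief sketch of that branch, using GreaterThan, may clarify how the Date comparison differs from the numeric one. The class name TimestampConstraintExample, the property name creation_time, and the epoch values are invented; the usual caveat about the thread-local issue collector applies.

import java.util.Date;
import java.util.LinkedHashMap;

import org.onap.sdc.toscaparser.api.elements.constraints.Constraint;
import org.onap.sdc.toscaparser.api.elements.constraints.Schema;

public class TimestampConstraintExample {
    public static void main(String[] args) {
        Date cutoff = new Date(1500000000000L); // constraint boundary
        LinkedHashMap<String, Object> def = new LinkedHashMap<>();
        def.put("greater_than", cutoff);

        // TIMESTAMP properties take the Date branch of isValid(): value.after(constraintValue).
        Constraint c = Constraint.factory("greater_than", "creation_time", Schema.TIMESTAMP, def);
        if (c != null) {
            c.validate(new Date(1600000000000L)); // after the cutoff -> passes
            c.validate(new Date(1400000000000L)); // before the cutoff -> JE008 issue reported
        }
    }
}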
@@ -21,65 +21,62 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; import java.util.Date; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class GreaterThan extends Constraint { - @Override - protected void _setValues() { - - constraintKey = GREATER_THAN; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public GreaterThan(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values")); - } - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return ((Date)value).after((Date)constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 > n2; - } - - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } + @Override + protected void setValues() { + + setConstraintKey(GREATER_THAN); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public GreaterThan(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values")); + } + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return ((Date) value).after((Date) constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 > n2; + } + + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java index 32719fa..4edf021 100644 --- 
a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,6 +23,7 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; import java.util.Date; import java.util.ArrayList; @@ -34,95 +35,89 @@ public class InRange extends Constraint { //the two values declared. private static final String UNBOUNDED = "UNBOUNDED"; - - private Object min,max; - - protected void _setValues() { - - constraintKey = IN_RANGE; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - validTypes.add("String"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - validPropTypes.add(Schema.RANGE); - - } - - @SuppressWarnings("unchecked") - public InRange(String name,String type,Object c) { - super(name,type,c); - - if(!(constraintValue instanceof ArrayList) || ((ArrayList)constraintValue).size() != 2) { + + private Object min, max; + + protected void setValues() { + + setConstraintKey(IN_RANGE); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "String", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + validPropTypes.add(Schema.RANGE); + + } + + @SuppressWarnings("unchecked") + public InRange(String name, String type, Object c) { + super(name, type, c); + + if (!(constraintValue instanceof ArrayList) || ((ArrayList) constraintValue).size() != 2) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE106", "InvalidSchemaError: The property \"in_range\" expects a list")); - - } - ArrayList alcv = (ArrayList)constraintValue; + } + + ArrayList alcv = (ArrayList) constraintValue; String msg = "The property \"in_range\" expects comparable values"; - for(Object vo: alcv) { - if(!validTypes.contains(vo.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + msg)); - } + for (Object vo : alcv) { + if (!validTypes.contains(vo.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + 
msg)); + } // The only string we allow for range is the special value 'UNBOUNDED' - if((vo instanceof String) && !((String)vo).equals(UNBOUNDED)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg)); + if ((vo instanceof String) && !((String) vo).equals(UNBOUNDED)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg)); } } min = alcv.get(0); max = alcv.get(1); - - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(min instanceof Date && max instanceof Date) { - return !((Date)value).before((Date)min) && - !((Date)value).after((Date)max); - } - return false; - } - - Double dvalue = new Double(value.toString()); - if(!(min instanceof String)) { - if(dvalue < new Double(min.toString())) { - return false; + + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (min instanceof Date && max instanceof Date) { + return !((Date) value).before((Date) min) + && !((Date) value).after((Date) max); } - } - else if(!((String)min).equals(UNBOUNDED)) { return false; } - if(!(max instanceof String)) { - if(dvalue > new Double(max.toString())) { + + Double dvalue = new Double(value.toString()); + if (!(min instanceof String)) { + if (dvalue < new Double(min.toString())) { return false; } + } else if (!((String) min).equals(UNBOUNDED)) { + return false; } - else if(!((String)max).equals(UNBOUNDED)) { + if (!(max instanceof String)) { + if (dvalue > new Double(max.toString())) { + return false; + } + } else if (!((String) max).equals(UNBOUNDED)) { return false; } return true; - } + } - @Override - protected String _errMsg(Object value) { + @Override + protected String errMsg(Object value) { return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"", - valueMsg,propertyName,min.toString(),max.toString()); - } + valueMsg, propertyName, min.toString(), max.toString()); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java index 1abdcfd..7988cb8 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,44 +23,45 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Collections; + public class Length extends Constraint { - // Constraint class for "length" - - // Constrains the property or parameter to a value of a given length. + // Constraint class for "length" - @Override - protected void _setValues() { + // Constrains the property or parameter to a value of a given length. 
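Returning to the InRange implementation just above: the only string accepted as a bound is the special value UNBOUNDED, which disables that side of the range check. A minimal sketch follows; the class name InRangeExample and the property name instance_count are invented, and the thread-local issue collector is assumed to be in place.

import java.util.ArrayList;
import java.util.LinkedHashMap;

import org.onap.sdc.toscaparser.api.elements.constraints.Constraint;
import org.onap.sdc.toscaparser.api.elements.constraints.Schema;

public class InRangeExample {
    public static void main(String[] args) {
        // in_range takes a two-element list of [min, max].
        ArrayList<Object> bounds = new ArrayList<>();
        bounds.add(1);
        bounds.add("UNBOUNDED"); // no upper bound
        LinkedHashMap<String, Object> def = new LinkedHashMap<>();
        def.put("in_range", bounds);

        Constraint c = Constraint.factory("in_range", "instance_count", Schema.INTEGER, def);
        if (c != null) {
            c.validate(5); // >= 1 and the upper bound is unbounded -> passes
            c.validate(0); // below the lower bound -> JE008 issue reported
        }
    }
}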
- constraintKey = LENGTH; + @Override + protected void setValues() { - validTypes.add("Integer"); - - validPropTypes.add(Schema.STRING); - - } - - public Length(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects an integer")); - } - } - - @Override - protected boolean _isValid(Object value) { - if(value instanceof String && constraintValue instanceof Integer && - ((String)value).length() == (Integer)constraintValue) { - return true; - } - return false; - } + setConstraintKey(LENGTH); + addValidTypes(Collections.singletonList("Integer")); + + validPropTypes.add(Schema.STRING); + + } + + public Length(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects an integer")); + } + } + + @Override + protected boolean isValid(Object value) { + if (value instanceof String && constraintValue instanceof Integer && + ((String) value).length() == (Integer) constraintValue) { + return true; + } + return false; + } - @Override - protected String _errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } + @Override + protected String errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java index 9f1cd65..37a4afc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,68 +21,65 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; import java.util.Date; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class LessOrEqual extends Constraint { - // Constraint class for "less_or_equal" - - // Constrains a property or parameter to a value less than or equal - // to ('<=') the value declared. 
- - protected void _setValues() { - - constraintKey = LESS_OR_EQUAL; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public LessOrEqual(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values")); - } - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return !((Date)value).after((Date)constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 <= n2; - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } + // Constraint class for "less_or_equal" + + // Constrains a property or parameter to a value less than or equal + // to ('<=') the value declared. + + protected void setValues() { + + setConstraintKey(LESS_OR_EQUAL); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public LessOrEqual(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values")); + } + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return !((Date) value).after((Date) constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 <= n2; + } + + @Override + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java index b893fea..952861d 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the 
"License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,63 +23,60 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; import java.util.Date; public class LessThan extends Constraint { - @Override - protected void _setValues() { - - constraintKey = LESS_THAN; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public LessThan(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values")); - } - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return ((Date)value).before((Date)constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 < n2; - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } + @Override + protected void setValues() { + + setConstraintKey(LESS_THAN); + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public LessThan(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values")); + } + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return ((Date) value).before((Date) constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 < n2; + } + + @Override + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property 
\"%s\" must be less than \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java index 2cb20eb..9068b65 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,55 +21,54 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Collections; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class MaxLength extends Constraint { - // Constraint class for "min_length" - - // Constrains the property or parameter to a value of a maximum length. - - @Override - protected void _setValues() { - - constraintKey = MAX_LENGTH; - - validTypes.add("Integer"); - - validPropTypes.add(Schema.STRING); - validPropTypes.add(Schema.MAP); - - } - - public MaxLength(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer")); - } - } - - @SuppressWarnings("unchecked") - @Override - protected boolean _isValid(Object value) { - if(value instanceof String && constraintValue instanceof Integer && - ((String)value).length() <= (Integer)constraintValue) { - return true; - } - else if(value instanceof LinkedHashMap && constraintValue instanceof Integer && - ((LinkedHashMap)value).size() <= (Integer)constraintValue) { - return true; - } - return false; - } - - @Override - protected String _errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } + // Constraint class for "min_length" + + // Constrains the property or parameter to a value of a maximum length. 
+ + @Override + protected void setValues() { + + setConstraintKey(MAX_LENGTH); + + addValidTypes(Collections.singletonList("Integer")); + + + validPropTypes.add(Schema.STRING); + validPropTypes.add(Schema.MAP); + + } + + public MaxLength(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer")); + } + } + + @SuppressWarnings("unchecked") + @Override + protected boolean isValid(Object value) { + if (value instanceof String && constraintValue instanceof Integer + && ((String) value).length() <= (Integer) constraintValue) { + return true; + } else { + return value instanceof LinkedHashMap && constraintValue instanceof Integer + && ((LinkedHashMap) value).size() <= (Integer) constraintValue; + } + } + + @Override + protected String errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java index e7d0a9d..eb1d870 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,55 +21,53 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Collections; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class MinLength extends Constraint { - // Constraint class for "min_length" - - // Constrains the property or parameter to a value of a minimum length. 
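The MaxLength code above (and MinLength, which follows) accepts both strings and maps; for maps it is the entry count that gets compared against the limit. A small sketch of the map case; the class name LengthConstraintExample and the property name metadata are invented, with the usual assumption that the thread-local issue collector is initialized.

import java.util.LinkedHashMap;

import org.onap.sdc.toscaparser.api.elements.constraints.Constraint;
import org.onap.sdc.toscaparser.api.elements.constraints.Schema;

public class LengthConstraintExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> def = new LinkedHashMap<>();
        def.put("max_length", 2);

        // For MAP properties, isValid() compares the number of entries against the limit.
        Constraint c = Constraint.factory("max_length", "metadata", Schema.MAP, def);

        LinkedHashMap<String, Object> value = new LinkedHashMap<>();
        value.put("a", 1);
        value.put("b", 2);
        value.put("c", 3);

        if (c != null) {
            c.validate(value); // 3 entries > 2 -> JE008 issue reported
        }
    }
}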
- - @Override - protected void _setValues() { - - constraintKey = MIN_LENGTH; - - validTypes.add("Integer"); - - validPropTypes.add(Schema.STRING); - validPropTypes.add(Schema.MAP); - - } - - public MinLength(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer")); - } - } - - @SuppressWarnings("unchecked") - @Override - protected boolean _isValid(Object value) { - if(value instanceof String && constraintValue instanceof Integer && - ((String)value).length() >= (Integer)constraintValue) { - return true; - } - else if(value instanceof LinkedHashMap && constraintValue instanceof Integer && - ((LinkedHashMap)value).size() >= (Integer)constraintValue) { - return true; - } - return false; - } - - @Override - protected String _errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } + // Constraint class for "min_length" + + // Constrains the property or parameter to a value of a minimum length. + + @Override + protected void setValues() { + + setConstraintKey(MIN_LENGTH); + + addValidTypes(Collections.singletonList("Integer")); + + validPropTypes.add(Schema.STRING); + validPropTypes.add(Schema.MAP); + + } + + public MinLength(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer")); + } + } + + @SuppressWarnings("unchecked") + @Override + protected boolean isValid(Object value) { + if (value instanceof String && constraintValue instanceof Integer + && ((String) value).length() >= (Integer) constraintValue) { + return true; + } else { + return value instanceof LinkedHashMap && constraintValue instanceof Integer + && ((LinkedHashMap) value).size() >= (Integer) constraintValue; + } + } + + @Override + protected String errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } } @@ -77,16 +75,16 @@ public class MinLength extends Constraint { class MinLength(Constraint): """Constraint class for "min_length" - + Constrains the property or parameter to a value to a minimum length. 
""" - + constraint_key = Constraint.MIN_LENGTH - + valid_types = (int, ) - + valid_prop_types = (Schema.STRING, Schema.MAP) - + def __init__(self, property_name, property_type, constraint): super(MinLength, self).__init__(property_name, property_type, constraint) @@ -94,14 +92,14 @@ class MinLength(Constraint): ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "min_length" ' 'expects an integer.'))) - + def _is_valid(self, value): if ((isinstance(value, str) or isinstance(value, dict)) and len(value) >= self.constraint_value): return True - + return False - + def _err_msg(self, value): return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' 'must be at least "%(cvalue)s".') % diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java index f1b374e..913e922 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,63 +21,62 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Collections; import java.util.regex.Matcher; import java.util.regex.PatternSyntaxException; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class Pattern extends Constraint { - @Override - protected void _setValues() { - - constraintKey = PATTERN; - - validTypes.add("String"); - - validPropTypes.add(Schema.STRING); - - } - - - public Pattern(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property \"pattern\" expects a string")); - } - } - - @Override - protected boolean _isValid(Object value) { - try { - if(!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", - value.toString(),propertyName))); - return false; - } - String strp = constraintValue.toString(); - String strm = value.toString(); - java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp); - Matcher matcher = pattern.matcher(strm); - if(matcher.find() && matcher.end() == strm.length()) { - return true; - } - return false; - } - catch(PatternSyntaxException pse) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", - constraintValue.toString(),propertyName))); - return false; - } - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"", - 
value.toString(),propertyName,constraintValue.toString()); - } + @Override + protected void setValues() { + + setConstraintKey(PATTERN); + + addValidTypes(Collections.singletonList("String")); + + validPropTypes.add(Schema.STRING); + + } + + + public Pattern(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property \"pattern\" expects a string")); + } + } + + @Override + protected boolean isValid(Object value) { + try { + if (!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", + value.toString(), propertyName))); + return false; + } + String strp = constraintValue.toString(); + String strm = value.toString(); + java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp); + Matcher matcher = pattern.matcher(strm); + if (matcher.find() && matcher.end() == strm.length()) { + return true; + } + return false; + } catch (PatternSyntaxException pse) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", + constraintValue.toString(), propertyName))); + return false; + } + } + + @Override + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java index 06a9cd0..15ec597 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
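One detail of the Pattern constraint above that the refactor preserves: isValid() only requires the regex match to end at the end of the string (matcher.find() plus matcher.end() == length), so the start of the value is not anchored. A brief sketch; the class name PatternConstraintExample and the property name vlan_id are invented, with the usual collector assumption.

import java.util.LinkedHashMap;

import org.onap.sdc.toscaparser.api.elements.constraints.Constraint;
import org.onap.sdc.toscaparser.api.elements.constraints.Schema;

public class PatternConstraintExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> def = new LinkedHashMap<>();
        def.put("pattern", "[0-9]+");

        Constraint c = Constraint.factory("pattern", "vlan_id", Schema.STRING, def);
        if (c != null) {
            c.validate("1234");  // match ends at the end of the string -> passes
            c.validate("1234x"); // match cannot reach the end -> JE008 issue reported
            c.validate("x1234"); // also passes: only the end of the match is anchored, not the start
        }
    }
}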
@@ -20,109 +20,110 @@ package org.onap.sdc.toscaparser.api.elements.constraints; +import com.google.common.collect.ImmutableMap; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.enums.FileSize; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; public class Schema { - private static final String TYPE = "type"; - private static final String REQUIRED = "required"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static final String STATUS = "status"; - private static final String ENTRYSCHEMA = "entry_schema"; - private static final String KEYS[] = { - TYPE, REQUIRED, DESCRIPTION,DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS}; - - public static final String INTEGER = "integer"; - public static final String STRING = "string"; - public static final String BOOLEAN = "boolean"; - public static final String FLOAT = "float"; - public static final String RANGE = "range"; - public static final String NUMBER = "number"; - public static final String TIMESTAMP = "timestamp"; - public static final String LIST = "list"; - public static final String MAP = "map"; - public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; - public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; - public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - public static final String VERSION = "version"; - public static final String PORTDEF = "PortDef"; - public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME - public static final String JSON = "json"; - - public static final String PROPERTY_TYPES[] = { - INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, LIST, MAP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION, PORTDEF, PORTSPEC, JSON}; - - public static final String SIMPLE_PROPERTY_TYPES[] = { - INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION}; - - @SuppressWarnings("unused") - private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; - - private static Map SCALAR_UNIT_SIZE_DICT = new HashMap<>(); - static { - SCALAR_UNIT_SIZE_DICT.put("B", 1L); - SCALAR_UNIT_SIZE_DICT.put("KB", 1000L); - SCALAR_UNIT_SIZE_DICT.put("KIB", 1024L); - SCALAR_UNIT_SIZE_DICT.put("MB", 1000000L); - SCALAR_UNIT_SIZE_DICT.put("MIB", 1048576L); - SCALAR_UNIT_SIZE_DICT.put("GB", 1000000000L); - SCALAR_UNIT_SIZE_DICT.put("GIB", 1073741824L); - SCALAR_UNIT_SIZE_DICT.put("TB", 1000000000000L); - SCALAR_UNIT_SIZE_DICT.put("TIB", 1099511627776L); - } - - private String name; - private LinkedHashMap schema; - private int _len; - private ArrayList constraintsList; - - - public Schema(String _name,LinkedHashMap _schemaDict) { - name = _name; - - if(!(_schemaDict instanceof LinkedHashMap)) { + private static final String TYPE = "type"; + private static final String REQUIRED = "required"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String STATUS = "status"; + private static final String ENTRYSCHEMA = "entry_schema"; + private static final String[] KEYS = { + TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS, 
ENTRYSCHEMA, STATUS}; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final String FLOAT = "float"; + public static final String RANGE = "range"; + public static final String NUMBER = "number"; + public static final String TIMESTAMP = "timestamp"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; + public static final String VERSION = "version"; + public static final String PORTDEF = "PortDef"; + public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME + public static final String JSON = "json"; + + public static final String[] PROPERTY_TYPES = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC, JSON}; + + public static final String[] SIMPLE_PROPERTY_TYPES = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION}; + + @SuppressWarnings("unused") + private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; + + private static Map scalarUnitSizeDict = ImmutableMap.builder() + .put("B", FileSize.B) + .put("KB", FileSize.KB) + .put("MB", FileSize.MB) + .put("GB", FileSize.GB) + .put("TB", FileSize.TB) + .put("KIB", FileSize.KIB) + .put("MIB", FileSize.MIB) + .put("GIB", FileSize.GIB) + .put("TIB", FileSize.TIB) + .build(); + + + private String name; + private LinkedHashMap schema; + private int len; + private ArrayList constraintsList; + + + public Schema(String name, LinkedHashMap schemaDict) { + this.name = name; + + if (!(schemaDict instanceof LinkedHashMap)) { //msg = (_('Schema definition of "%(pname)s" must be a dict.') // % dict(pname=name)) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE117", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name))); + "InvalidSchemaError: Schema definition of \"%s\" must be a dict", this.name))); } - if(_schemaDict.get("type") == null) { + if (schemaDict.get("type") == null) { //msg = (_('Schema definition of "%(pname)s" must have a "type" ' // 'attribute.') % dict(pname=name)) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE118", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", this.name))); } - - schema = _schemaDict; - _len = 0; //??? None + + schema = schemaDict; + len = 0; //??? 
None constraintsList = new ArrayList<>(); - } + } public String getType() { - return (String)schema.get(TYPE); + return (String) schema.get(TYPE); } public boolean isRequired() { - return (boolean)schema.getOrDefault(REQUIRED, true); + return (boolean) schema.getOrDefault(REQUIRED, true); } public String getDescription() { - return (String)schema.getOrDefault(DESCRIPTION,""); + return (String) schema.getOrDefault(DESCRIPTION, ""); } public Object getDefault() { @@ -130,53 +131,52 @@ public class Schema { } public String getStatus() { - return (String)schema.getOrDefault(STATUS,""); + return (String) schema.getOrDefault(STATUS, ""); } public static boolean isRequestedTypeSimple(String type) { - return Arrays.stream(SIMPLE_PROPERTY_TYPES).anyMatch(t->t.equals(type)); - } + return Arrays.asList(SIMPLE_PROPERTY_TYPES).contains(type); + } @SuppressWarnings("unchecked") - public ArrayList getConstraints() { - if(constraintsList.size() == 0) { - Object cob = schema.get(CONSTRAINTS); - if(cob instanceof ArrayList) { - ArrayList constraintSchemata = (ArrayList)cob; - for(Object ob: constraintSchemata) { - if(ob instanceof LinkedHashMap) { - for(String cClass: ((LinkedHashMap)ob).keySet()) { - Constraint c = Constraint.factory(cClass,name,getType(),ob); - if(c != null) { - constraintsList.add(c); - } - else { - // error - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format( - "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", - cClass,name))); - } - break; - } - } - } - } + public ArrayList getConstraints() { + if (constraintsList.size() == 0) { + Object cob = schema.get(CONSTRAINTS); + if (cob instanceof ArrayList) { + ArrayList constraintSchemata = (ArrayList) cob; + for (Object ob : constraintSchemata) { + if (ob instanceof LinkedHashMap) { + for (String cClass : ((LinkedHashMap) ob).keySet()) { + Constraint c = Constraint.factory(cClass, name, getType(), ob); + if (c != null) { + constraintsList.add(c); + } else { + // error + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format( + "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", + cClass, name))); + } + break; + } + } + } + } } return constraintsList; } @SuppressWarnings("unchecked") - public LinkedHashMap getEntrySchema() { - return (LinkedHashMap)schema.get(ENTRYSCHEMA); + public LinkedHashMap getEntrySchema() { + return (LinkedHashMap) schema.get(ENTRYSCHEMA); } - + // Python intrinsic methods... 
// substitute for __getitem__ (aka self[key]) public Object getItem(String key) { - return schema.get(key); + return schema.get(key); } - + /* def __iter__(self): for k in self.KEYS: @@ -187,23 +187,24 @@ public class Schema { else: yield k */ - + // substitute for __len__ (aka self.len()) public int getLen() { - int len = 0; - for(String k: KEYS) { - if(schema.get(k) != null) { - len++; - } - _len = len; - } - return _len; + int len = 0; + for (String k : KEYS) { + if (schema.get(k) != null) { + len++; + } + this.len = len; + } + return this.len; } + // getter - public LinkedHashMap getSchema() { - return schema; + public LinkedHashMap getSchema() { + return schema; } - + } /*python @@ -231,7 +232,7 @@ PROPERTY_TYPES = ( ) SCALAR_UNIT_SIZE_DEFAULT = 'B' -SCALAR_UNIT_SIZE_DICT = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, +scalarUnitSizeDict = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, 'MIB': 1048576, 'GB': 1000000000, 'GIB': 1073741824, 'TB': 1000000000000, 'TIB': 1099511627776} @@ -251,7 +252,7 @@ def __init__(self, name, schema_dict): ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) self.schema = schema_dict - self._len = None + self.len = None self.constraints_list = [] @property @@ -302,7 +303,7 @@ def __iter__(self): yield k def __len__(self): - if self._len is None: - self._len = len(list(iter(self))) - return self._len + if self.len is None: + self.len = len(list(iter(self))) + return self.len */ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java index d09caae..c3a192d 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
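The Schema accessors above only read from the supplied schema dict, so they can be exercised in isolation. A minimal usage sketch, assuming the jtosca classes from this module are on the classpath and no validation error paths (which go through ThreadLocalsHolder) are triggered; the property name and values are made up for the example.

import org.onap.sdc.toscaparser.api.elements.constraints.Schema;

import java.util.LinkedHashMap;

// Sketch only: feeds a hand-built property schema dict to the accessors shown above.
public class SchemaUsageSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> dict = new LinkedHashMap<>();
        dict.put("type", "integer");
        dict.put("required", true);
        dict.put("description", "listen port");

        Schema schema = new Schema("port", dict);
        System.out.println(schema.getType());        // integer
        System.out.println(schema.isRequired());     // true
        System.out.println(schema.getDescription()); // listen port
        System.out.println(schema.getLen());         // 3, only keys listed in KEYS are counted
        System.out.println(Schema.isRequestedTypeSimple(schema.getType())); // true
    }
}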
@@ -21,49 +21,44 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import java.util.ArrayList; +import java.util.Collections; public class ValidValues extends Constraint { - protected void _setValues() { + protected void setValues() { + setConstraintKey(VALID_VALUES); + Collections.addAll(validPropTypes, Schema.PROPERTY_TYPES); + } + - constraintKey = VALID_VALUES; - - for(String s: Schema.PROPERTY_TYPES) { - validPropTypes.add(s); - } - - } - - - public ValidValues(String name,String type,Object c) { - super(name,type,c); - - } + public ValidValues(String name, String type, Object c) { + super(name, type, c); + } @SuppressWarnings("unchecked") - protected boolean _isValid(Object val) { - if(!(constraintValue instanceof ArrayList)) { - return false; - } - if(val instanceof ArrayList) { - boolean bAll = true; - for(Object v: (ArrayList)val) { - if(!((ArrayList)constraintValue).contains(v)) { - bAll = false; - break; - }; - } - return bAll; - } - return ((ArrayList)constraintValue).contains(val); + protected boolean isValid(Object val) { + if (!(constraintValue instanceof ArrayList)) { + return false; + } + if (val instanceof ArrayList) { + boolean bAll = true; + for (Object v : (ArrayList) val) { + if (!((ArrayList) constraintValue).contains(v)) { + bAll = false; + break; + } + } + return bAll; + } + return ((ArrayList) constraintValue).contains(val); } - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"", - value.toString(),propertyName,constraintValue.toString()); + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"", + value.toString(), propertyName, constraintValue.toString()); } - + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java new file mode 100644 index 0000000..b07f7fa --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java @@ -0,0 +1,32 @@ +/* +============LICENSE_START======================================================= + SDC + ================================================================================ + Copyright (C) 2019 Nokia. All rights reserved. + ================================================================================ + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ ============LICENSE_END========================================================= +*/ +package org.onap.sdc.toscaparser.api.elements.enums; + +public class FileSize { + public static final long B = 1L; + public static final long KB = 1000L; + public static final long MB = 1000000L; + public static final long GB = 1000000000L; + public static final long TB = 1000000000000L; + public static final long KIB = 1000L; + public static final long MIB = 1048576L; + public static final long GIB = 1073741824L; + public static final long TIB = 1099511627776L; +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java index 715123b..ac0d837 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,20 +21,20 @@ package org.onap.sdc.toscaparser.api.elements.enums; public enum ToscaElementNames { - - TYPE ("type"), - PROPERTIES ("properties"), - ANNOTATIONS ("annotations"), - SOURCE_TYPE ("source_type"); - - private String name; - - ToscaElementNames(String name){ - this.name = name; - } - public String getName() { - return name; - } - + TYPE("type"), + PROPERTIES("properties"), + ANNOTATIONS("annotations"), + SOURCE_TYPE("source_type"); + + private String name; + + ToscaElementNames(String name) { + this.name = name; + } + + public String getName() { + return name; + } + } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java b/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java index 8e0915e..5fbfca0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
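FileSize above centralizes the byte multipliers that the rewritten scalarUnitSizeDict in Schema now references. A short usage sketch follows; the toBytes helper is hypothetical and exists only to show the constants being read. Note that FileSize defines KIB as 1000L, while the SCALAR_UNIT_SIZE_DICT it replaces used 1024L for KIB.

import org.onap.sdc.toscaparser.api.elements.enums.FileSize;

// Sketch only: converts an already parsed scalar-unit.size amount to bytes.
public class FileSizeUsageSketch {

    // Hypothetical helper, not part of the patch.
    static long toBytes(long amount, String unit) {
        switch (unit) {
            case "B":   return amount * FileSize.B;
            case "KB":  return amount * FileSize.KB;
            case "MB":  return amount * FileSize.MB;
            case "GB":  return amount * FileSize.GB;
            case "KIB": return amount * FileSize.KIB; // 1000L in this patch
            case "MIB": return amount * FileSize.MIB;
            default:    throw new IllegalArgumentException("Unsupported unit: " + unit);
        }
    }

    public static void main(String[] args) {
        System.out.println(toBytes(4, "MB")); // 4000000
    }
}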
@@ -39,100 +39,92 @@ import java.util.regex.Pattern; public class ExtTools { - private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName()); - - private static LinkedHashMap EXTENSION_INFO = new LinkedHashMap<>(); - - public ExtTools() { - - EXTENSION_INFO = _loadExtensions(); - } - - private LinkedHashMap _loadExtensions() { - - LinkedHashMap extensions = new LinkedHashMap<>(); - - Reflections reflections = new Reflections("extensions", new ResourcesScanner()); - Set resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$")); - - for(String resourcePath : resourcePaths) { - try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath); - InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8")); - BufferedReader br = new BufferedReader(isr);){ - String version = null; - ArrayList sections = null; - String defsFile = null; - String line; - - Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); - while ((line = br.readLine()) != null) { - line = line.replace("'", "\""); - Matcher matcher = pattern.matcher(line.toString()); - if (matcher.find()) { - if (matcher.group(1).equals("VERSION")) { - version = matcher.group(2); - if (version.startsWith("'") || version.startsWith("\"")) { - version = version.substring(1, version.length() - 1); - } - } - else if (matcher.group(1).equals("DEFS_FILE")) { - String fn = matcher.group(2); - if (fn.startsWith("'") || fn.startsWith("\"")) { - fn = fn.substring(1, fn.length() - 1); - } - defsFile = resourcePath.replaceFirst("\\w*.py$", fn); - } - else if (matcher.group(1).equals("SECTIONS")) { - sections = new ArrayList<>(); - Pattern secpat = Pattern.compile("\"([^\"]+)\""); - Matcher secmat = secpat.matcher(matcher.group(2)); - while (secmat.find()) { - sections.add(secmat.group(1)); - } - } - } - } - - if (version != null && defsFile != null) { - LinkedHashMap ext = new LinkedHashMap<>(); - ext.put("defs_file", defsFile); - if (sections != null) { - ext.put("sections", sections); - } - extensions.put(version, ext); - } - else { - // error - } - } - catch (Exception e) { - log.error("ExtTools - _loadExtensions - {}", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue - ("JE281", "Failed to load extensions" + e.getMessage())); - // ... 
- } - } - return extensions; - } - - public ArrayList getVersions() { - return new ArrayList(EXTENSION_INFO.keySet()); - } - - public LinkedHashMap> getSections() { - LinkedHashMap> sections = new LinkedHashMap<>(); - for(String version: EXTENSION_INFO.keySet()) { - LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); - sections.put(version,(ArrayList)eiv.get("sections")); + private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName()); + + private static LinkedHashMap extensionInfo = new LinkedHashMap<>(); + + public ExtTools() { + extensionInfo = loadExtensions(); + } + + private LinkedHashMap loadExtensions() { + + LinkedHashMap extensions = new LinkedHashMap<>(); + + Reflections reflections = new Reflections("extensions", new ResourcesScanner()); + Set resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$")); + + for (String resourcePath : resourcePaths) { + try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath); + InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8")); + BufferedReader br = new BufferedReader(isr);) { + String version = null; + ArrayList sections = null; + String defsFile = null; + String line; + + Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); + while ((line = br.readLine()) != null) { + line = line.replace("'", "\""); + Matcher matcher = pattern.matcher(line); + if (matcher.find()) { + if (matcher.group(1).equals("VERSION")) { + version = matcher.group(2); + if (version.startsWith("'") || version.startsWith("\"")) { + version = version.substring(1, version.length() - 1); + } + } else if (matcher.group(1).equals("DEFS_FILE")) { + String fn = matcher.group(2); + if (fn.startsWith("'") || fn.startsWith("\"")) { + fn = fn.substring(1, fn.length() - 1); + } + defsFile = resourcePath.replaceFirst("\\w*.py$", fn); + } else if (matcher.group(1).equals("SECTIONS")) { + sections = new ArrayList<>(); + Pattern secpat = Pattern.compile("\"([^\"]+)\""); + Matcher secmat = secpat.matcher(matcher.group(2)); + while (secmat.find()) { + sections.add(secmat.group(1)); + } + } + } + } + + if (version != null && defsFile != null) { + LinkedHashMap ext = new LinkedHashMap<>(); + ext.put("defs_file", defsFile); + if (sections != null) { + ext.put("sections", sections); + } + extensions.put(version, ext); + } + } catch (Exception e) { + log.error("ExtTools - loadExtensions - {}", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue( + "JE281", "Failed to load extensions" + e.getMessage())); + } + } + return extensions; + } + + public ArrayList getVersions() { + return new ArrayList(extensionInfo.keySet()); + } + + public LinkedHashMap> getSections() { + LinkedHashMap> sections = new LinkedHashMap<>(); + for (String version : extensionInfo.keySet()) { + LinkedHashMap eiv = (LinkedHashMap) extensionInfo.get(version); + sections.put(version, (ArrayList) eiv.get("sections")); } return sections; - } + } + + public String getDefsFile(String version) { + LinkedHashMap eiv = (LinkedHashMap) extensionInfo.get(version); + return (String) eiv.get("defs_file"); + } - public String getDefsFile(String version) { - LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); - return (String)eiv.get("defs_file"); - } - } /*python @@ -147,7 +139,7 @@ REQUIRED_ATTRIBUTES = ['VERSION', 'DEFS_FILE'] class ExtTools(object): def __init__(self): - self.EXTENSION_INFO = self._load_extensions() + self.extensionInfo = self._load_extensions() def 
_load_extensions(self): '''Dynamically load all the extensions .''' @@ -193,17 +185,17 @@ class ExtTools(object): return extensions def get_versions(self): - return self.EXTENSION_INFO.keys() + return self.extensionInfo.keys() def get_sections(self): sections = {} - for version in self.EXTENSION_INFO.keys(): - sections[version] = self.EXTENSION_INFO[version]['sections'] + for version in self.extensionInfo.keys(): + sections[version] = self.extensionInfo[version]['sections'] return sections def get_defs_file(self, version): - versiondata = self.EXTENSION_INFO.get(version) + versiondata = self.extensionInfo.get(version) if versiondata: return versiondata.get('defs_file') diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java index d47fd57..4ebeba9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -44,24 +44,24 @@ public class Concat extends Function { // ':' , // get_attribute: [ server, port ] ] - - public Concat(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - public Object result() { - return this; - } - - @Override - void validate() { - if(args.size() < 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145", - "ValueError: Invalid arguments for function \"concat\". " + - "Expected at least one argument")); - } - } + + public Concat(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if (args.size() < 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145", + "ValueError: Invalid arguments for function \"concat\". " + + "Expected at least one argument")); + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java index 2b4759f..711a7ca 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
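ExtTools.loadExtensions() above recognizes KEY = value lines in the bundled *.py descriptors with the pattern ^([^#]\S+)\s*=\s*(\S.*)$ after normalizing single quotes to double quotes. A standalone sketch of that parsing step; the sample lines and file name are illustrative, real descriptors live under extensions/ on the classpath.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Sketch only: exercises the same assignment-line pattern used by ExtTools.loadExtensions().
public class ExtensionLineParseSketch {

    private static final Pattern ASSIGNMENT = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$");

    public static void main(String[] args) {
        String[] lines = {
                "VERSION = '1.1'",
                "DEFS_FILE = 'example_definitions.yaml'",   // file name made up for the example
                "# comment lines never match because of the [^#] guard"
        };
        for (String raw : lines) {
            String line = raw.replace("'", "\"");           // same normalization as ExtTools
            Matcher m = ASSIGNMENT.matcher(line);
            if (m.find()) {
                String value = m.group(2);
                if (value.startsWith("\"")) {               // strip the surrounding quotes
                    value = value.substring(1, value.length() - 1);
                }
                System.out.println(m.group(1) + " -> " + value);
            }
        }
    }
}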
@@ -20,95 +20,99 @@ package org.onap.sdc.toscaparser.api.functions; -import java.util.*; import org.onap.sdc.toscaparser.api.TopologyTemplate; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + public abstract class Function { - protected static final String GET_PROPERTY = "get_property"; - protected static final String GET_ATTRIBUTE = "get_attribute"; - protected static final String GET_INPUT = "get_input"; - protected static final String GET_OPERATION_OUTPUT = "get_operation_output"; - protected static final String CONCAT = "concat"; - protected static final String TOKEN = "token"; - - protected static final String SELF = "SELF"; - protected static final String HOST = "HOST"; - protected static final String TARGET = "TARGET"; - protected static final String SOURCE = "SOURCE"; - - protected static final String HOSTED_ON = "tosca.relationships.HostedOn"; - - protected static HashMap functionMappings = _getFunctionMappings(); - - private static HashMap _getFunctionMappings() { - HashMap map = new HashMap<>(); - map.put(GET_PROPERTY,"GetProperty"); - map.put(GET_INPUT, "GetInput"); - map.put(GET_ATTRIBUTE, "GetAttribute"); - map.put(GET_OPERATION_OUTPUT, "GetOperationOutput"); - map.put(CONCAT, "Concat"); - map.put(TOKEN, "Token"); - return map; - } - - protected TopologyTemplate toscaTpl; - protected Object context; - protected String name; - protected ArrayList args; - - - public Function(TopologyTemplate _toscaTpl,Object _context,String _name,ArrayList _args) { + protected static final String GET_PROPERTY = "get_property"; + protected static final String GET_ATTRIBUTE = "get_attribute"; + protected static final String GET_INPUT = "get_input"; + protected static final String GET_OPERATION_OUTPUT = "get_operation_output"; + protected static final String CONCAT = "concat"; + protected static final String TOKEN = "token"; + + protected static final String SELF = "SELF"; + protected static final String HOST = "HOST"; + protected static final String TARGET = "TARGET"; + protected static final String SOURCE = "SOURCE"; + + protected static final String HOSTED_ON = "tosca.relationships.HostedOn"; + + protected static HashMap functionMappings = _getFunctionMappings(); + + private static HashMap _getFunctionMappings() { + HashMap map = new HashMap<>(); + map.put(GET_PROPERTY, "GetProperty"); + map.put(GET_INPUT, "GetInput"); + map.put(GET_ATTRIBUTE, "GetAttribute"); + map.put(GET_OPERATION_OUTPUT, "GetOperationOutput"); + map.put(CONCAT, "Concat"); + map.put(TOKEN, "Token"); + return map; + } + + protected TopologyTemplate toscaTpl; + protected Object context; + protected String name; + protected ArrayList args; + + + public Function(TopologyTemplate _toscaTpl, Object _context, String _name, ArrayList _args) { toscaTpl = _toscaTpl; context = _context; name = _name; args = _args; validate(); - - } - - abstract Object result(); - - abstract void validate(); - - @SuppressWarnings("unchecked") - public static boolean isFunction(Object funcObj) { - // Returns True if the provided function is a Tosca intrinsic function. - // - //Examples: - // - //* "{ get_property: { SELF, port } }" - //* "{ get_input: db_name }" - //* Function instance - - //:param function: Function as string or a Function instance. - //:return: True if function is a Tosca intrinsic function, otherwise False. 
- // - - if(funcObj instanceof LinkedHashMap) { - LinkedHashMap function = (LinkedHashMap)funcObj; - if(function.size() == 1) { - String funcName = (new ArrayList(function.keySet())).get(0); - return functionMappings.keySet().contains(funcName); - } - } - return (funcObj instanceof Function); - } - - @SuppressWarnings("unchecked") - public static Object getFunction(TopologyTemplate ttpl,Object context,Object rawFunctionObj, boolean resolveGetInput) { - // Gets a Function instance representing the provided template function. - - // If the format provided raw_function format is not relevant for template - // functions or if the function name doesn't exist in function mapping the - // method returns the provided raw_function. - // - // :param tosca_tpl: The tosca template. - // :param node_template: The node template the function is specified for. - // :param raw_function: The raw function as dict. - // :return: Template function as Function instance or the raw_function if - // parsing was unsuccessful. + + } + + abstract Object result(); + + abstract void validate(); + + @SuppressWarnings("unchecked") + public static boolean isFunction(Object funcObj) { + // Returns True if the provided function is a Tosca intrinsic function. + // + //Examples: + // + //* "{ get_property: { SELF, port } }" + //* "{ get_input: db_name }" + //* Function instance + + //:param function: Function as string or a Function instance. + //:return: True if function is a Tosca intrinsic function, otherwise False. + // + + if (funcObj instanceof LinkedHashMap) { + LinkedHashMap function = (LinkedHashMap) funcObj; + if (function.size() == 1) { + String funcName = (new ArrayList(function.keySet())).get(0); + return functionMappings.keySet().contains(funcName); + } + } + return (funcObj instanceof Function); + } + + @SuppressWarnings("unchecked") + public static Object getFunction(TopologyTemplate ttpl, Object context, Object rawFunctionObj, boolean resolveGetInput) { + // Gets a Function instance representing the provided template function. + + // If the format provided raw_function format is not relevant for template + // functions or if the function name doesn't exist in function mapping the + // method returns the provided raw_function. + // + // :param tosca_tpl: The tosca template. + // :param node_template: The node template the function is specified for. + // :param raw_function: The raw function as dict. + // :return: Template function as Function instance or the raw_function if + // parsing was unsuccessful. // iterate over leaves of the properties's tree and convert function leaves to function object, @@ -116,85 +120,85 @@ public abstract class Function { // assuming that leaf value of function is always map type contains 1 item (e.g. my_leaf: {get_input: xxx}). 
if (rawFunctionObj instanceof LinkedHashMap) { // In map type case - LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); - if(rawFunction.size() == 1 && - !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point - return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); - } else { - return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput); - } - } else if (rawFunctionObj instanceof ArrayList) { // In list type case - return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput); - } - - return rawFunctionObj; - } - - private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) { - // iterate over list properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original list. - ArrayList rawFunctionObjList = new ArrayList<>(); - for (Object rawFunctionObjItem: rawFunctionObj) { + LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); + if (rawFunction.size() == 1 && + !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point + return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); + } else { + return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput); + } + } else if (rawFunctionObj instanceof ArrayList) { // In list type case + return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput); + } + + return rawFunctionObj; + } + + private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) { + // iterate over list properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original list. + ArrayList rawFunctionObjList = new ArrayList<>(); + for (Object rawFunctionObjItem : rawFunctionObj) { rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput)); } - return rawFunctionObjList; - } - - private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) { - // iterate over map nested properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original map. - LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); - for (Object rawFunctionObjItem: rawFunction.entrySet()) { - Object itemValue = getFunction(ttpl, context, ((Map.Entry)rawFunctionObjItem).getValue(), resolveGetInput); - rawFunctionObjMap.put(((Map.Entry)rawFunctionObjItem).getKey(), itemValue); + return rawFunctionObjList; + } + + private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) { + // iterate over map nested properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original map. 
+ LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); + for (Object rawFunctionObjItem : rawFunction.entrySet()) { + Object itemValue = getFunction(ttpl, context, ((Map.Entry) rawFunctionObjItem).getValue(), resolveGetInput); + rawFunctionObjMap.put(((Map.Entry) rawFunctionObjItem).getKey(), itemValue); + } + return rawFunctionObjMap; + } + + private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { + if (isFunction(rawFunctionObjItem)) { + LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; + String funcName = (new ArrayList(rawFunction.keySet())).get(0); + if (functionMappings.keySet().contains(funcName)) { + String funcType = functionMappings.get(funcName); + Object oargs = (new ArrayList(rawFunction.values())).get(0); + ArrayList funcArgs; + if (oargs instanceof ArrayList) { + funcArgs = (ArrayList) oargs; + } else { + funcArgs = new ArrayList<>(); + funcArgs.add(oargs); + } + + switch (funcType) { + case "GetInput": + if (resolveGetInput) { + GetInput input = new GetInput(ttpl, context, funcName, funcArgs); + return input.result(); + } + return new GetInput(ttpl, context, funcName, funcArgs); + case "GetAttribute": + return new GetAttribute(ttpl, context, funcName, funcArgs); + case "GetProperty": + return new GetProperty(ttpl, context, funcName, funcArgs); + case "GetOperationOutput": + return new GetOperationOutput(ttpl, context, funcName, funcArgs); + case "Concat": + return new Concat(ttpl, context, funcName, funcArgs); + case "Token": + return new Token(ttpl, context, funcName, funcArgs); + } + } } - return rawFunctionObjMap; - } - - private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { - if(isFunction(rawFunctionObjItem)) { - LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; - String funcName = (new ArrayList(rawFunction.keySet())).get(0); - if (functionMappings.keySet().contains(funcName)) { - String funcType = functionMappings.get(funcName); - Object oargs = (new ArrayList(rawFunction.values())).get(0); - ArrayList funcArgs; - if (oargs instanceof ArrayList) { - funcArgs = (ArrayList) oargs; - } else { - funcArgs = new ArrayList<>(); - funcArgs.add(oargs); - } - - switch (funcType) { - case "GetInput": - if (resolveGetInput) { - GetInput input = new GetInput(ttpl, context, funcName, funcArgs); - return input.result(); - } - return new GetInput(ttpl, context, funcName, funcArgs); - case "GetAttribute": - return new GetAttribute(ttpl, context, funcName, funcArgs); - case "GetProperty": - return new GetProperty(ttpl, context, funcName, funcArgs); - case "GetOperationOutput": - return new GetOperationOutput(ttpl, context, funcName, funcArgs); - case "Concat": - return new Concat(ttpl, context, funcName, funcArgs); - case "Token": - return new Token(ttpl, context, funcName, funcArgs); - } - } - } - - return rawFunctionObjItem; - } - - @Override - public String toString() { - String argsStr = args.size() > 1 ? args.toString() : args.get(0).toString(); - return name + ":" + argsStr; - } + + return rawFunctionObjItem; + } + + @Override + public String toString() { + String argsStr = args.size() > 1 ? 
args.toString() : args.get(0).toString(); + return name + ":" + argsStr; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java index aa85eb2..564d410 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -39,272 +39,272 @@ import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; import org.onap.sdc.toscaparser.api.elements.constraints.Schema; public class GetAttribute extends Function { - // Get an attribute value of an entity defined in the service template - - // Node template attributes values are set in runtime and therefore its the - // responsibility of the Tosca engine to implement the evaluation of - // get_attribute functions. - - // Arguments: - - // * Node template name | HOST. - // * Attribute name. - - // If the HOST keyword is passed as the node template name argument the - // function will search each node template along the HostedOn relationship - // chain until a node which contains the attribute is found. - - // Examples: - - // * { get_attribute: [ server, private_address ] } - // * { get_attribute: [ HOST, private_address ] } - // * { get_attribute: [ HOST, private_address, 0 ] } - // * { get_attribute: [ HOST, private_address, 0, some_prop] } - - public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl, context, name, args); - } - - @Override - void validate() { - if (args.size() < 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", - "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); - return; - } else if (args.size() == 2) { - _findNodeTemplateContainingAttribute(); - } else { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl == null) { - return; - } - int index = 2; - AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1)); - if (attr != null) { - // found - } else { - index = 3; - // then check the req or caps - if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". Expected a String argument")); - } - - attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString()); - if (attr == null) { - return; - } - } - - - String valueType = (String) attr.getSchema().get("type"); - if (args.size() > index) { - for (Object elem : args.subList(index, args.size())) { - if (valueType.equals("list")) { - if (!(elem instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( - "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". 
Expected positive integer argument", - elem.toString()))); - } - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap) ob).get("type"); - } else if (valueType.equals("map")) { - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap) ob).get("type"); - } else { - boolean bFound = false; - for (String p : Schema.PROPERTY_TYPES) { - if (p.equals(valueType)) { - bFound = true; - break; - } - } - if (bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( - "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", - elem))); - return; - } else { // It is a complex type - DataType dataType = new DataType(valueType, null); - LinkedHashMap props = - dataType.getAllProperties(); - PropertyDef prop = props.get((String) elem); - if (prop != null) { - valueType = (String) prop.getSchema().get("type"); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( - "KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%\"", - elem, valueType))); - } - } - } - } - } - } - } - - @Override - public Object result() { - return this; - } - - private NodeTemplate getReferencedNodeTemplate() { - // Gets the NodeTemplate instance the get_attribute function refers to - - // If HOST keyword was used as the node template argument, the node - // template which contains the attribute along the HostedOn relationship - // chain will be returned. - - return _findNodeTemplateContainingAttribute(); - - } - - // Attributes can be explicitly created as part of the type definition - // or a property name can be implicitly used as an attribute name - private NodeTemplate _findNodeTemplateContainingAttribute() { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl != null && - !_attributeExistsInType(nodeTpl.getTypeDefinition()) && - !nodeTpl.getProperties().keySet().contains(getAttributeName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( - "KeyError: Attribute \"%s\" was not found in node template \"%s\"", - getAttributeName(), nodeTpl.getName()))); - } - return nodeTpl; - } - - private boolean _attributeExistsInType(StatefulEntityType typeDefinition) { - LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); - return attrsDef.get(getAttributeName()) != null; - } - - private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); - if (nodeTemplate != null) { - LinkedHashMap hostedOnRel = - (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); - for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) { - String targetName = r.getNodeTemplateName(); - NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType) targetNode.getTypeDefinition(); - for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) { + // Get an attribute value of an entity defined in the service template + + // Node template attributes values are set in runtime and therefore its the + // responsibility of the Tosca engine to implement the evaluation of + // get_attribute functions. + + // Arguments: + + // * Node template name | HOST. + // * Attribute name. 
+ + // If the HOST keyword is passed as the node template name argument the + // function will search each node template along the HostedOn relationship + // chain until a node which contains the attribute is found. + + // Examples: + + // * { get_attribute: [ server, private_address ] } + // * { get_attribute: [ HOST, private_address ] } + // * { get_attribute: [ HOST, private_address, 0 ] } + // * { get_attribute: [ HOST, private_address, 0, some_prop] } + + public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + void validate() { + if (args.size() < 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", + "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); + return; + } else if (args.size() == 2) { + _findNodeTemplateContainingAttribute(); + } else { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return; + } + int index = 2; + AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1)); + if (attr != null) { + // found + } else { + index = 3; + // then check the req or caps + if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". Expected a String argument")); + } + + attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString()); + if (attr == null) { + return; + } + } + + + String valueType = (String) attr.getSchema().get("type"); + if (args.size() > index) { + for (Object elem : args.subList(index, args.size())) { + if (valueType.equals("list")) { + if (!(elem instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( + "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". Expected positive integer argument", + elem.toString()))); + } + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else if (valueType.equals("map")) { + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else { + boolean bFound = false; + for (String p : Schema.PROPERTY_TYPES) { + if (p.equals(valueType)) { + bFound = true; + break; + } + } + if (bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( + "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", + elem))); + return; + } else { // It is a complex type + DataType dataType = new DataType(valueType, null); + LinkedHashMap props = + dataType.getAllProperties(); + PropertyDef prop = props.get((String) elem); + if (prop != null) { + valueType = (String) prop.getSchema().get("type"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( + "KeyError: Illegal arguments for function \"get_attribute\". 
Attribute name \"%s\" not found in \"%\"", + elem, valueType))); + } + } + } + } + } + } + } + + @Override + public Object result() { + return this; + } + + private NodeTemplate getReferencedNodeTemplate() { + // Gets the NodeTemplate instance the get_attribute function refers to + + // If HOST keyword was used as the node template argument, the node + // template which contains the attribute along the HostedOn relationship + // chain will be returned. + + return _findNodeTemplateContainingAttribute(); + + } + + // Attributes can be explicitly created as part of the type definition + // or a property name can be implicitly used as an attribute name + private NodeTemplate _findNodeTemplateContainingAttribute() { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl != null && + !_attributeExistsInType(nodeTpl.getTypeDefinition()) && + !nodeTpl.getProperties().keySet().contains(getAttributeName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( + "KeyError: Attribute \"%s\" was not found in node template \"%s\"", + getAttributeName(), nodeTpl.getName()))); + } + return nodeTpl; + } + + private boolean _attributeExistsInType(StatefulEntityType typeDefinition) { + LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); + return attrsDef.get(getAttributeName()) != null; + } + + private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); + if (nodeTemplate != null) { + LinkedHashMap hostedOnRel = + (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); + for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) { + String targetName = r.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType) targetNode.getTypeDefinition(); + for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) { // if(((ArrayList)hostedOnRel.get("valid_target_types")).contains(capability.getType())) { - if (capability.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { - if (_attributeExistsInType(targetType)) { - return targetNode; - } - return _findHostContainingAttribute(targetName); - } - } - } - } - return null; - } - - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if (nodeTemplateName.equals(HOST)) { - // Currently this is the only way to tell whether the function - // is used within the outputs section of the TOSCA template. - if (context instanceof ArrayList) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", - "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); - return null; - } - NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); - if (nodeTpl == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( - "ValueError: \"get_attribute: [ HOST, ... 
]\" was used in " + - "node template \"%s\" but \"%s\" was not found in " + - "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON))); - return null; - } - return nodeTpl; - } - if (nodeTemplateName.equals(TARGET)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", - "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - if (nodeTemplateName.equals(SOURCE)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", - "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - String name; - if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { - name = ((NodeTemplate) context).getName(); - } else { - name = nodeTemplateName; - } - for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { - if (nt.getName().equals(name)) { - return nt; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( - "KeyError: Node template \"%s\" was not found", nodeTemplateName))); - return null; - } - - public AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) { - - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - // Find attribute in node template's requirements - for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) { - String nodeName = r.getNodeTemplateName(); - if (r.getName().equals(reqOrCap)) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName); - } - } - // If requirement was not found, look in node template's capabilities - return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName); - } - - private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate, - String capabilityName, - String attrName) { - // Gets a node template capability attribute - CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); - - if (cap != null) { - AttributeDef attribute = null; - LinkedHashMap attrs = - cap.getDefinition().getAttributesDef(); - if (attrs != null && attrs.keySet().contains(attrName)) { - attribute = attrs.get(attrName); - } - if (attribute == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( - "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); - } - return attribute; - } - String msg = String.format( - "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); - return null; - } - - String getNodeTemplateName() { - return (String) args.get(0); - } - - String getAttributeName() { - return (String) args.get(1); - } + if (capability.inheritsFrom((ArrayList) 
hostedOnRel.get("valid_target_types"))) { + if (_attributeExistsInType(targetType)) { + return targetNode; + } + return _findHostContainingAttribute(targetName); + } + } + } + } + return null; + } + + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if (nodeTemplateName.equals(HOST)) { + // Currently this is the only way to tell whether the function + // is used within the outputs section of the TOSCA template. + if (context instanceof ArrayList) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", + "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); + return null; + } + NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); + if (nodeTpl == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( + "ValueError: \"get_attribute: [ HOST, ... ]\" was used in " + + "node template \"%s\" but \"%s\" was not found in " + + "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON))); + return null; + } + return nodeTpl; + } + if (nodeTemplateName.equals(TARGET)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", + "KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + String name; + if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate) context).getName(); + } else { + name = nodeTemplateName; + } + for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { + if (nt.getName().equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( + "KeyError: Node template \"%s\" was not found", nodeTemplateName))); + return null; + } + + public AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) { + + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + // Find attribute in node template's requirements + for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) { + String nodeName = r.getNodeTemplateName(); + if (r.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName); + } + } + // If requirement was not found, look in node template's capabilities + return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName); + } + + private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate, + String capabilityName, + String attrName) { + // Gets a node template capability attribute + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + + if (cap != null) { + AttributeDef attribute = null; + LinkedHashMap attrs = + cap.getDefinition().getAttributesDef(); + if (attrs != null && attrs.keySet().contains(attrName)) { + attribute = attrs.get(attrName); + } + if 
(attribute == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( + "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); + } + return attribute; + } + String msg = String.format( + "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); + return null; + } + + String getNodeTemplateName() { + return (String) args.get(0); + } + + String getAttributeName() { + return (String) args.get(1); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java index 026113e..ee5be17 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java @@ -5,9 +5,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -30,19 +30,19 @@ import java.util.LinkedHashMap; public class GetInput extends Function { - public static final String INDEX = "INDEX"; - public static final String INPUTS = "inputs"; - public static final String TYPE = "type"; - public static final String PROPERTIES = "properties"; - public static final String ENTRY_SCHEMA = "entry_schema"; + public static final String INDEX = "INDEX"; + public static final String INPUTS = "inputs"; + public static final String TYPE = "type"; + public static final String PROPERTIES = "properties"; + public static final String ENTRY_SCHEMA = "entry_schema"; - public GetInput(TopologyTemplate toscaTpl, Object context, String name, ArrayList _args) { - super(toscaTpl,context,name,_args); - - } + public GetInput(TopologyTemplate toscaTpl, Object context, String name, ArrayList _args) { + super(toscaTpl, context, name, _args); - @Override - void validate() { + } + + @Override + void validate() { // if(args.size() != 1) { // //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017 @@ -50,116 +50,114 @@ public class GetInput extends Function { // "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", // args.toString())); // } - boolean bFound = false; - for(Input inp: toscaTpl.getInputs()) { - if(inp.getName().equals(args.get(0))) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( - "UnknownInputError: Unknown input \"%s\"",args.get(0)))); - } - else if(args.size() > 2){ - LinkedHashMap inputs = (LinkedHashMap)toscaTpl.getTpl().get(INPUTS); - LinkedHashMap data = (LinkedHashMap)inputs.get(getInputName()); - String type ; - - for(int argumentNumber=1;argumentNumber schema = (LinkedHashMap)data.get(ENTRY_SCHEMA); - dataTypeName=(String)schema.get(TYPE); - }else{ - 
dataTypeName=type; - } - //check property name - LinkedHashMap dataType = (LinkedHashMap)toscaTpl.getCustomDefs().get(dataTypeName); - if(dataType != null) { - LinkedHashMap props = (LinkedHashMap) dataType.get(PROPERTIES); - data = (LinkedHashMap)props.get(args.get(argumentNumber).toString()); - if(data != null) { - bFound = true; - } - } - } - if(!bFound){ - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE282", String.format( - "UnknownDataType: Unknown data type \"%s\"",args.get(argumentNumber)))); - } - } - } - } - - public Object result() { - if(toscaTpl.getParsedParams() != null && - toscaTpl.getParsedParams().get(getInputName()) != null) { - LinkedHashMap ttinp = (LinkedHashMap)toscaTpl.getTpl().get(INPUTS); - LinkedHashMap ttinpinp = (LinkedHashMap)ttinp.get(getInputName()); - String type = (String)ttinpinp.get("type"); - - Object value = DataEntity.validateDatatype( - type, toscaTpl.getParsedParams().get(getInputName()),null,toscaTpl.getCustomDefs(),null); - //SDC resolving Get Input - if (value instanceof ArrayList){ - if(args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size()> (Integer)args.get(1)){ - return ((ArrayList) value).get((Integer) args.get(1)); - } + boolean bFound = false; + for (Input inp : toscaTpl.getInputs()) { + if (inp.getName().equals(args.get(0))) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( + "UnknownInputError: Unknown input \"%s\"", args.get(0)))); + } else if (args.size() > 2) { + LinkedHashMap inputs = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); + LinkedHashMap data = (LinkedHashMap) inputs.get(getInputName()); + String type; + + for (int argumentNumber = 1; argumentNumber < args.size(); argumentNumber++) { + String dataTypeName = ""; + bFound = false; + if (INDEX.equals(args.get(argumentNumber).toString()) || (args.get(argumentNumber) instanceof Integer)) { + bFound = true; + } else { + type = (String) data.get(TYPE); + //get type name + if (type.equals("list") || type.equals("map")) { + LinkedHashMap schema = (LinkedHashMap) data.get(ENTRY_SCHEMA); + dataTypeName = (String) schema.get(TYPE); + } else { + dataTypeName = type; + } + //check property name + LinkedHashMap dataType = (LinkedHashMap) toscaTpl.getCustomDefs().get(dataTypeName); + if (dataType != null) { + LinkedHashMap props = (LinkedHashMap) dataType.get(PROPERTIES); + data = (LinkedHashMap) props.get(args.get(argumentNumber).toString()); + if (data != null) { + bFound = true; + } + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE282", String.format( + "UnknownDataType: Unknown data type \"%s\"", args.get(argumentNumber)))); + } + } + } + } + + public Object result() { + if (toscaTpl.getParsedParams() != null && + toscaTpl.getParsedParams().get(getInputName()) != null) { + LinkedHashMap ttinp = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); + LinkedHashMap ttinpinp = (LinkedHashMap) ttinp.get(getInputName()); + String type = (String) ttinpinp.get("type"); + + Object value = DataEntity.validateDatatype( + type, toscaTpl.getParsedParams().get(getInputName()), null, toscaTpl.getCustomDefs(), null); + //SDC resolving Get Input + if (value instanceof ArrayList) { + if (args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size() > (Integer) args.get(1)) { + return ((ArrayList) value).get((Integer) args.get(1)); + } /* commented out for 
network cloud (SDNC) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))); return null; */ - } - return value; - } - - Input inputDef = null; - for(Input inpDef: toscaTpl.getInputs()) { - if(getInputName().equals(inpDef.getName())) { - inputDef = inpDef; - break; - } - } - if(inputDef != null) { - if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList){ - if ( args.get(1) instanceof Integer - && ((ArrayList) inputDef.getDefault()).size()> ((Integer)args.get(1)).intValue()) { - return ((ArrayList) inputDef.getDefault()).get(((Integer)args.get(1)).intValue()); - } + } + return value; + } + + Input inputDef = null; + for (Input inpDef : toscaTpl.getInputs()) { + if (getInputName().equals(inpDef.getName())) { + inputDef = inpDef; + break; + } + } + if (inputDef != null) { + if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList) { + if (args.get(1) instanceof Integer + && ((ArrayList) inputDef.getDefault()).size() > ((Integer) args.get(1)).intValue()) { + return ((ArrayList) inputDef.getDefault()).get(((Integer) args.get(1)).intValue()); + } /* commented out for network cloud (SDNC) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format( "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0))))); return null; */ - } - return inputDef.getDefault(); - } - return null; - } - - public String getInputName() { - return (String)args.get(0); - } - - public LinkedHashMap getEntrySchema() { - LinkedHashMap inputs = (LinkedHashMap)toscaTpl.getTpl().get(INPUTS); - LinkedHashMap inputValue = (LinkedHashMap)inputs.get(getInputName()); - return (LinkedHashMap)inputValue.get(ENTRY_SCHEMA); - } - - public ArrayList getArguments(){ - return args; - } + } + return inputDef.getDefault(); + } + return null; + } + + public String getInputName() { + return (String) args.get(0); + } + + public LinkedHashMap getEntrySchema() { + LinkedHashMap inputs = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); + LinkedHashMap inputValue = (LinkedHashMap) inputs.get(getInputName()); + return (LinkedHashMap) inputValue.get(ENTRY_SCHEMA); + } + + public ArrayList getArguments() { + return args; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java index 2acc79a..06a28d6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
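[Reviewer note, not part of the patch] The reformatted GetInput.result() above keeps its original behaviour: a single-argument get_input resolves against the parsed parameters or the input's default, and a second integer argument indexes into a list-typed input. A minimal template-side sketch, in the same comment style these classes already use (input and node names are illustrative only):

    // inputs:
    //   ports:
    //     type: list
    //     entry_schema:
    //       type: integer
    //     default: [ 8080, 8443 ]
    //
    // node_templates:
    //   web_server:
    //     properties:
    //       first_port: { get_input: [ ports, 0 ] }   // list + index branch -> 8080
    //       all_ports:  { get_input: ports }          // plain single-argument form -> [ 8080, 8443 ]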
@@ -35,132 +35,126 @@ import java.util.ArrayList; public class GetOperationOutput extends Function { - public GetOperationOutput(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - public void validate() { - if(args.size() == 4) { - _findNodeTemplate((String)args.get(0)); - String interfaceName = _findInterfaceName((String)args.get(1)); - _findOperationName(interfaceName,(String)args.get(2)); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159", - "ValueError: Illegal arguments for function \"get_operation_output\". " + - "Expected arguments: \"template_name\",\"interface_name\"," + - "\"operation_name\",\"output_variable_name\"")); - } - } - - private String _findInterfaceName(String _interfaceName) { - boolean bFound = false; - for(String sect: InterfacesDef.SECTIONS) { - if(sect.equals(_interfaceName)) { - bFound = true; - break; - } - } - if(bFound) { - return _interfaceName; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format( - "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", - _interfaceName))); - return null; - } - } - - private String _findOperationName(String interfaceName,String operationName) { - - if(interfaceName.equals("Configure") || - interfaceName.equals("tosca.interfaces.node.relationship.Configure")) { - boolean bFound = false; - for(String sect: StatefulEntityType.interfacesRelationshipConfigureOperations) { - if(sect.equals(operationName)) { - bFound = true; - break; - } - } - if(bFound) { - return operationName; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format( - "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName))); - return null; - } - } - if(interfaceName.equals("Standard") || - interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) { - boolean bFound = false; - for(String sect: StatefulEntityType.interfacesNodeLifecycleOperations) { - if(sect.equals(operationName)) { - bFound = true; - break; - } - } - if(bFound) { - return operationName; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format( - "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName))); - return null; - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format( - "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", - interfaceName))); - return null; - } - } - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if(nodeTemplateName.equals(TARGET)) { - if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164", - "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - if(nodeTemplateName.equals(SOURCE)) { - if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165", - "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node")); - return null; - } 
- return ((RelationshipTemplate)context).getTarget(); - } - String name; - if(nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { - name = ((NodeTemplate)context).getName(); - } - else { - name = nodeTemplateName; - } - for(NodeTemplate nt: toscaTpl.getNodeTemplates()) { - if(nodeTemplateName.equals(name)) { - return nt; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE166", String.format( - "KeyError: Node template \"%s\" was not found",nodeTemplateName))); - return null; + public GetOperationOutput(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); } - @Override - public Object result() { - return this; - } + @Override + public void validate() { + if (args.size() == 4) { + _findNodeTemplate((String) args.get(0)); + String interfaceName = _findInterfaceName((String) args.get(1)); + _findOperationName(interfaceName, (String) args.get(2)); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159", + "ValueError: Illegal arguments for function \"get_operation_output\". " + + "Expected arguments: \"template_name\",\"interface_name\"," + + "\"operation_name\",\"output_variable_name\"")); + } + } + + private String _findInterfaceName(String _interfaceName) { + boolean bFound = false; + for (String sect : InterfacesDef.SECTIONS) { + if (sect.equals(_interfaceName)) { + bFound = true; + break; + } + } + if (bFound) { + return _interfaceName; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format( + "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", + _interfaceName))); + return null; + } + } + + private String _findOperationName(String interfaceName, String operationName) { + + if (interfaceName.equals("Configure") || + interfaceName.equals("tosca.interfaces.node.relationship.Configure")) { + boolean bFound = false; + for (String sect : StatefulEntityType.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS) { + if (sect.equals(operationName)) { + bFound = true; + break; + } + } + if (bFound) { + return operationName; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format( + "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", + operationName))); + return null; + } + } + if (interfaceName.equals("Standard") || + interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) { + boolean bFound = false; + for (String sect : StatefulEntityType.INTERFACE_NODE_LIFECYCLE_OPERATIONS) { + if (sect.equals(operationName)) { + bFound = true; + break; + } + } + if (bFound) { + return operationName; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format( + "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", + operationName))); + return null; + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format( + "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", + interfaceName))); + return null; + } + } + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if (nodeTemplateName.equals(TARGET)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164", + 
"KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165", + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + String name; + if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate) context).getName(); + } else { + name = nodeTemplateName; + } + for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { + if (nodeTemplateName.equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE166", String.format( + "KeyError: Node template \"%s\" was not found", nodeTemplateName))); + return null; + } + + @Override + public Object result() { + return this; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java index 2da57ef..90e0a8e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,14 +20,14 @@ package org.onap.sdc.toscaparser.api.functions; -import org.onap.sdc.toscaparser.api.*; -import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.onap.sdc.toscaparser.api.CapabilityAssignment; +import org.onap.sdc.toscaparser.api.NodeTemplate; +import org.onap.sdc.toscaparser.api.Property; +import org.onap.sdc.toscaparser.api.RelationshipTemplate; +import org.onap.sdc.toscaparser.api.RequirementAssignment; +import org.onap.sdc.toscaparser.api.TopologyTemplate; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; import org.onap.sdc.toscaparser.api.elements.EntityType; import org.onap.sdc.toscaparser.api.elements.NodeType; import org.onap.sdc.toscaparser.api.elements.PropertyDef; @@ -35,340 +35,331 @@ import org.onap.sdc.toscaparser.api.elements.RelationshipType; import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; +import java.util.LinkedHashMap; + public class GetProperty extends Function { - // Get a property value of an entity defined in the same service template - - // Arguments: - - // * Node template name | SELF | HOST | SOURCE | TARGET. - // * Requirement or capability name (optional). - // * Property name. 
- - // If requirement or capability name is specified, the behavior is as follows: - // The req or cap name is first looked up in the specified node template's - // requirements. - // If found, it would search for a matching capability - // of an other node template and get its property as specified in function - // arguments. - // Otherwise, the req or cap name would be looked up in the specified - // node template's capabilities and if found, it would return the property of - // the capability as specified in function arguments. - - // Examples: - - // * { get_property: [ mysql_server, port ] } - // * { get_property: [ SELF, db_port ] } - // * { get_property: [ SELF, database_endpoint, port ] } - // * { get_property: [ SELF, database_endpoint, port, 1 ] } - - - public GetProperty(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - void validate() { - if(args.size() < 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167", - "ValueError: Illegal arguments for function \"get_property\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); - return; - } - if(args.size() == 2) { - Property foundProp = _findProperty((String)args.get(1)); - if(foundProp == null) { - return; - } - Object prop = foundProp.getValue(); - if(prop instanceof Function) { - getFunction(toscaTpl,context, prop, toscaTpl.getResolveGetInput()); - } - } - else if(args.size() >= 3) { - // do not use _find_property to avoid raise KeyError - // if the prop is not found - // First check if there is property with this name - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - LinkedHashMap props; - if(nodeTpl != null) { - props = nodeTpl.getProperties(); - } - else { - props = new LinkedHashMap<>(); - } - int index = 2; - Object propertyValue; - if(props.get(args.get(1)) != null) { - propertyValue = ((Property)props.get(args.get(1))).getValue(); - } - else { - index = 3; - // then check the req or caps - propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); - } - - if(args.size() > index) { - for(Object elem: args.subList(index,args.size()-1)) { - if(propertyValue instanceof ArrayList) { - int intElem = (int)elem; - propertyValue = _getIndexValue(propertyValue,intElem); - } - else { - propertyValue = _getAttributeValue(propertyValue,(String)elem); - } - } - } - } - } - - @SuppressWarnings("unchecked") - private Object _findReqOrCapProperty(String reqOrCap,String propertyName) { - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - if(nodeTpl == null) { - return null; + // Get a property value of an entity defined in the same service template + + // Arguments: + + // * Node template name | SELF | HOST | SOURCE | TARGET. + // * Requirement or capability name (optional). + // * Property name. + + // If requirement or capability name is specified, the behavior is as follows: + // The req or cap name is first looked up in the specified node template's + // requirements. + // If found, it would search for a matching capability + // of an other node template and get its property as specified in function + // arguments. + // Otherwise, the req or cap name would be looked up in the specified + // node template's capabilities and if found, it would return the property of + // the capability as specified in function arguments. 
+ + // Examples: + + // * { get_property: [ mysql_server, port ] } + // * { get_property: [ SELF, db_port ] } + // * { get_property: [ SELF, database_endpoint, port ] } + // * { get_property: [ SELF, database_endpoint, port, 1 ] } + + + public GetProperty(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + void validate() { + if (args.size() < 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167", + "ValueError: Illegal arguments for function \"get_property\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); + return; + } + if (args.size() == 2) { + Property foundProp = _findProperty((String) args.get(1)); + if (foundProp == null) { + return; + } + Object prop = foundProp.getValue(); + if (prop instanceof Function) { + getFunction(toscaTpl, context, prop, toscaTpl.getResolveGetInput()); + } + } else if (args.size() >= 3) { + // do not use _find_property to avoid raise KeyError + // if the prop is not found + // First check if there is property with this name + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + LinkedHashMap props; + if (nodeTpl != null) { + props = nodeTpl.getProperties(); + } else { + props = new LinkedHashMap<>(); + } + int index = 2; + Object propertyValue; + if (props.get(args.get(1)) != null) { + propertyValue = ((Property) props.get(args.get(1))).getValue(); + } else { + index = 3; + // then check the req or caps + propertyValue = _findReqOrCapProperty((String) args.get(1), (String) args.get(2)); + } + + if (args.size() > index) { + for (Object elem : args.subList(index, args.size() - 1)) { + if (propertyValue instanceof ArrayList) { + int intElem = (int) elem; + propertyValue = _getIndexValue(propertyValue, intElem); + } else { + propertyValue = _getAttributeValue(propertyValue, (String) elem); + } + } + } + } + } + + @SuppressWarnings("unchecked") + private Object _findReqOrCapProperty(String reqOrCap, String propertyName) { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return null; + } + // look for property in node template's requirements + for (RequirementAssignment req : nodeTpl.getRequirements().getAll()) { + String nodeName = req.getNodeTemplateName(); + if (req.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityProperty(nodeTemplate, req.getName(), propertyName, true); + } + } + // If requirement was not found, look in node template's capabilities + return _getCapabilityProperty(nodeTpl, reqOrCap, propertyName, true); + } + + private Object _getCapabilityProperty(NodeTemplate nodeTemplate, + String capabilityName, + String propertyName, + boolean throwErrors) { + + // Gets a node template capability property + Object property = null; + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + if (cap != null) { + LinkedHashMap props = cap.getProperties(); + if (props != null && props.get(propertyName) != null) { + property = ((Property) props.get(propertyName)).getValue(); + } + if (property == null && throwErrors) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE168", String.format( + "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + propertyName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); 
+ } + return property; + } + if (throwErrors) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format( + "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()))); + } + + return null; + } + + private Property _findProperty(String propertyName) { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return null; + } + LinkedHashMap props = nodeTpl.getProperties(); + Property found = props.get(propertyName); + if (found == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format( + "KeyError: Property \"%s\" was not found in node template \"%s\"", + propertyName, nodeTpl.getName()))); + } + return found; + } + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if (nodeTemplateName.equals(SELF)) { + return (NodeTemplate) context; + } + // enable the HOST value in the function + if (nodeTemplateName.equals(HOST)) { + NodeTemplate node = _findHostContainingProperty(null); + if (node == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format( + "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + (String) args.get(2), (String) args.get(1), ((NodeTemplate) context).getName()))); + return null; + } + return node; + } + if (nodeTemplateName.equals(TARGET)) { + if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172", + "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173", + "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getSource(); + } + if (toscaTpl.getNodeTemplates() == null) { + return null; } - // look for property in node template's requirements - for(RequirementAssignment req: nodeTpl.getRequirements().getAll()) { - String nodeName = req.getNodeTemplateName(); - if(req.getName().equals(reqOrCap)) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityProperty(nodeTemplate,req.getName(),propertyName,true); - } - } - // If requirement was not found, look in node template's capabilities - return _getCapabilityProperty(nodeTpl,reqOrCap,propertyName,true); - } - - private Object _getCapabilityProperty(NodeTemplate nodeTemplate, - String capabilityName, - String propertyName, - boolean throwErrors) { - - // Gets a node template capability property - Object property = null; - CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); - if(cap != null) { - LinkedHashMap props = cap.getProperties(); - if(props != null && props.get(propertyName) != null) { - property = ((Property)props.get(propertyName)).getValue(); - } - if(property == null && throwErrors) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE168", String.format( - "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - propertyName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()))); - } - return property; - } - if(throwErrors) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format( - "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName()))); - } - - return null; - } - - private Property _findProperty(String propertyName) { - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - if(nodeTpl == null) { - return null; + for (NodeTemplate nodeTemplate : toscaTpl.getNodeTemplates()) { + if (nodeTemplate.getName().equals(nodeTemplateName)) { + return nodeTemplate; + } } - LinkedHashMap props = nodeTpl.getProperties(); - Property found = props.get(propertyName); - if(found == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format( - "KeyError: Property \"%s\" was not found in node template \"%s\"", - propertyName,nodeTpl.getName()))); - } - return found; - } - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if(nodeTemplateName.equals(SELF)) { - return (NodeTemplate)context; - } - // enable the HOST value in the function - if(nodeTemplateName.equals(HOST)) { - NodeTemplate node = _findHostContainingProperty(null); - if(node == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format( - "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - (String)args.get(2),(String)args.get(1),((NodeTemplate)context).getName()))); - return null; - } - return node; - } - if(nodeTemplateName.equals(TARGET)) { - if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172", - "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - if(nodeTemplateName.equals(SOURCE)) { - if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173", - "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getSource(); - } - if(toscaTpl.getNodeTemplates() == null) { - return null; - } - for(NodeTemplate nodeTemplate: toscaTpl.getNodeTemplates()) { - if(nodeTemplate.getName().equals(nodeTemplateName)) { - return nodeTemplate; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format( - "KeyError: Node template \"%s\" was not found. 
Referenced from Node Template \"%s\"", - nodeTemplateName,((NodeTemplate)context).getName()))); - - return null; - } - - @SuppressWarnings("rawtypes") - private Object _getIndexValue(Object value,int index) { - if(value instanceof ArrayList) { - if(index < ((ArrayList)value).size()) { - return ((ArrayList)value).get(index); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", - args.get(2),args.get(1),((NodeTemplate)context).getName(),index))); - - } - } - else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format( + "KeyError: Node template \"%s\" was not found. Referenced from Node Template \"%s\"", + nodeTemplateName, ((NodeTemplate) context).getName()))); + + return null; + } + + @SuppressWarnings("rawtypes") + private Object _getIndexValue(Object value, int index) { + if (value instanceof ArrayList) { + if (index < ((ArrayList) value).size()) { + return ((ArrayList) value).get(index); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", + args.get(2), args.get(1), ((NodeTemplate) context).getName(), index))); + + } + } else { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE176", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", - args.get(2),args.get(1),((NodeTemplate)context).getName()))); - } - return null; - } - - @SuppressWarnings("unchecked") - private Object _getAttributeValue(Object value,String attribute) { - if(value instanceof LinkedHashMap) { - Object ov = ((LinkedHashMap)value).get(attribute); - if(ov != null) { - return ov; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", - args.get(2),args.get(1),((NodeTemplate)context).getName(),attribute))); - } - } - else { + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", + args.get(2), args.get(1), ((NodeTemplate) context).getName()))); + } + return null; + } + + @SuppressWarnings("unchecked") + private Object _getAttributeValue(Object value, String attribute) { + if (value instanceof LinkedHashMap) { + Object ov = ((LinkedHashMap) value).get(attribute); + if (ov != null) { + return ov; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", + args.get(2), args.get(1), ((NodeTemplate) context).getName(), attribute))); + } + } else { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE178", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", - args.get(2),args.get(1),((NodeTemplate)context).getName()))); - } - return null; - } - - // Add this functions similar to get_attribute case - private NodeTemplate 
_findHostContainingProperty(String nodeTemplateName) { - if(nodeTemplateName == null) { - nodeTemplateName = SELF; - } - NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); - LinkedHashMap hostedOnRel = (LinkedHashMap) - EntityType.TOSCA_DEF.get(HOSTED_ON); - for(RequirementAssignment requirement: nodeTemplate.getRequirements().getAll()) { - String targetName = requirement.getNodeTemplateName(); - NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType)targetNode.getTypeDefinition(); - for(CapabilityTypeDef capDef: targetType.getCapabilitiesObjects()) { - if(capDef.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { - if(_propertyExistsInType(targetType)) { - return targetNode; - } - // If requirement was not found, look in node - // template's capabilities - if(args.size() > 2 && - _getCapabilityProperty(targetNode,(String)args.get(1),(String)args.get(2),false) != null) { - return targetNode; - } - - return _findHostContainingProperty(targetName); - } - } - - } - return null; - } - - private boolean _propertyExistsInType(StatefulEntityType typeDefinition) { - LinkedHashMap propsDef = typeDefinition.getPropertiesDef(); - return propsDef.keySet().contains((String)args.get(1)); - } - - @Override - public Object result() { + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", + args.get(2), args.get(1), ((NodeTemplate) context).getName()))); + } + return null; + } + + // Add this functions similar to get_attribute case + private NodeTemplate _findHostContainingProperty(String nodeTemplateName) { + if (nodeTemplateName == null) { + nodeTemplateName = SELF; + } + NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); + LinkedHashMap hostedOnRel = (LinkedHashMap) + EntityType.TOSCA_DEF.get(HOSTED_ON); + for (RequirementAssignment requirement : nodeTemplate.getRequirements().getAll()) { + String targetName = requirement.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType) targetNode.getTypeDefinition(); + for (CapabilityTypeDef capDef : targetType.getCapabilitiesObjects()) { + if (capDef.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { + if (_propertyExistsInType(targetType)) { + return targetNode; + } + // If requirement was not found, look in node + // template's capabilities + if (args.size() > 2 && + _getCapabilityProperty(targetNode, (String) args.get(1), (String) args.get(2), false) != null) { + return targetNode; + } + + return _findHostContainingProperty(targetName); + } + } + + } + return null; + } + + private boolean _propertyExistsInType(StatefulEntityType typeDefinition) { + LinkedHashMap propsDef = typeDefinition.getPropertiesDef(); + return propsDef.keySet().contains((String) args.get(1)); + } + + @Override + public Object result() { Object propertyValue; - if(args.size() >= 3) { - // First check if there is property with this name - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - LinkedHashMap props; - if(nodeTpl != null) { - props = nodeTpl.getProperties(); - } - else { - props = new LinkedHashMap<>(); - } - int index = 2; - if(props.get(args.get(1)) != null) { - propertyValue = ((Property)props.get(args.get(1))).getValue(); - } - else { - index = 3; - // then check the req or caps - propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); - } - - if(args.size() > index) { - for(Object elem: args.subList(index,args.size()-1)) { - 
if(propertyValue instanceof ArrayList) { - int intElem = (int)elem; - propertyValue = _getIndexValue(propertyValue,intElem); - } - else { - propertyValue = _getAttributeValue(propertyValue,(String)elem); - } - } - } - } - else { - propertyValue = _findProperty((String)args.get(1)).getValue(); - } - if(propertyValue instanceof Function) { - return ((Function)propertyValue).result(); + if (args.size() >= 3) { + // First check if there is property with this name + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + LinkedHashMap props; + if (nodeTpl != null) { + props = nodeTpl.getProperties(); + } else { + props = new LinkedHashMap<>(); + } + int index = 2; + if (props.get(args.get(1)) != null) { + propertyValue = ((Property) props.get(args.get(1))).getValue(); + } else { + index = 3; + // then check the req or caps + propertyValue = _findReqOrCapProperty((String) args.get(1), (String) args.get(2)); + } + + if (args.size() > index) { + for (Object elem : args.subList(index, args.size() - 1)) { + if (propertyValue instanceof ArrayList) { + int intElem = (int) elem; + propertyValue = _getIndexValue(propertyValue, intElem); + } else { + propertyValue = _getAttributeValue(propertyValue, (String) elem); + } + } + } + } else { + propertyValue = _findProperty((String) args.get(1)).getValue(); + } + if (propertyValue instanceof Function) { + return ((Function) propertyValue).result(); + } + return getFunction(toscaTpl, context, propertyValue, toscaTpl.getResolveGetInput()); + } + + public String getNodeTemplateName() { + return (String) args.get(0); + } + + public String getPropertyName() { + if (args.size() > 2) { + return (String) args.get(2); } - return getFunction(toscaTpl,context,propertyValue, toscaTpl.getResolveGetInput()); - } - - public String getNodeTemplateName() { - return (String)args.get(0); - } - - public String getPropertyName() { - if(args.size() > 2) { - return (String)args.get(2); - } - return (String)args.get(1); - } - - public String getReqorCap() { - if(args.size() > 2) { - return (String)args.get(1); - } - return null; - } - + return (String) args.get(1); + } + + public String getReqorCap() { + if (args.size() > 2) { + return (String) args.get(1); + } + return null; + } + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java index e8e160e..240ce85 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
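[Reviewer note, not part of the patch] The GetProperty hunk above preserves the lookup order documented in its own comments: the property is first looked up directly on the named node template, then through the named requirement, and finally on the node template's capabilities (with HOST walking the HostedOn relationship chain). A template-side sketch in the same comment style as the examples already embedded in the class (all names are illustrative only):

    // node_templates:
    //   my_app:
    //     requirements:
    //       - database: mysql_server
    //     properties:
    //       app_user: { get_property: [ SELF, admin_user ] }       // direct property lookup
    //       db_port:  { get_property: [ SELF, database, port ] }   // via requirement "database"
    //       mem_size: { get_property: [ HOST, host, mem_size ] }   // resolved on the hosting node's "host" capability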
@@ -46,39 +46,38 @@ public class Token extends Function { //Example: - // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] + // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] - public Token(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - public Object result() { - return this; - } + public Token(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } - @Override - void validate() { - if(args.size() < 3) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180", - "ValueError: Invalid arguments for function \"token\". " + - "Expected at least three arguments")); - } - else { - if(!(args.get(1) instanceof String) || - ((String)args.get(1)).length() != 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181", + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if (args.size() < 3) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180", "ValueError: Invalid arguments for function \"token\". " + - "Expected single char value as second argument")); + "Expected at least three arguments")); + } else { + if (!(args.get(1) instanceof String) || + ((String) args.get(1)).length() != 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181", + "ValueError: Invalid arguments for function \"token\". " + + "Expected single char value as second argument")); } - if(!(args.get(2) instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182", - "ValueError: Invalid arguments for function \"token\"" + - "Expected integer value as third argument")); - } - } - } + if (!(args.get(2) instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182", + "ValueError: Invalid arguments for function \"token\"" + + "Expected integer value as third argument")); + } + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java index 397c637..a34ebb5 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
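[Reviewer note, not part of the patch] The reformatted Token.validate() above keeps the original checks: at least three arguments, a single-character string as the second argument, and an integer index as the third. In the comment style already used in the class:

    // { token: [ { get_attribute: [ my_server, data_endpoint, ip_address ] }, ':', 1 ] }
    //   -> valid: three arguments, ':' is a single character, 1 is an Integer
    // { token: [ "1.2.3.4", "::", 1 ] }
    //   -> rejected with JE181: the separator must be exactly one character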
@@ -20,77 +20,79 @@ package org.onap.sdc.toscaparser.api.parameters; +import org.onap.sdc.toscaparser.api.Property; +import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; + import java.util.ArrayList; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import org.onap.sdc.toscaparser.api.Property; -import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; +public class Annotation { + + private static final String HEAT = "HEAT"; + private String name; + private String type; + private ArrayList properties; + + + public Annotation() { + } + + @SuppressWarnings("unchecked") + public Annotation(Map.Entry annotationEntry) { + if (annotationEntry != null) { + name = annotationEntry.getKey(); + Map annValue = (Map) annotationEntry.getValue(); + type = (String) annValue.get(ToscaElementNames.TYPE.getName()); + properties = fetchProperties((Map) annValue.get(ToscaElementNames.PROPERTIES.getName())); + } + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public ArrayList getProperties() { + return properties; + } + + public void setProperties(ArrayList properties) { + this.properties = properties; + } + + private ArrayList fetchProperties(Map properties) { + if (properties != null) { + return (ArrayList) properties.entrySet().stream() + .map(Property::new) + .collect(Collectors.toList()); + } + return null; + } + + public boolean isHeatSourceType() { + if (properties == null) { + return false; + } + Optional sourceType = properties.stream() + .filter(p -> p.getName().equals(ToscaElementNames.SOURCE_TYPE.getName())) + .findFirst(); + if (!sourceType.isPresent()) { + return false; + } + return sourceType.get().getValue() != null && ((String) sourceType.get().getValue()).equals(HEAT); + } -public class Annotation{ - - private final static String HEAT = "HEAT"; - - private String name; - private String type; - private ArrayList properties; - - public Annotation(){} - @SuppressWarnings("unchecked") - public Annotation(Map.Entry annotationEntry){ - if(annotationEntry != null){ - name = annotationEntry.getKey(); - Map annValue = (Map) annotationEntry.getValue(); - type = (String) annValue.get(ToscaElementNames.TYPE.getName()); - properties = fetchProperties((Map) annValue.get(ToscaElementNames.PROPERTIES.getName())); - } - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public ArrayList getProperties() { - return properties; - } - - public void setProperties(ArrayList properties) { - this.properties = properties; - } - - private ArrayList fetchProperties(Map properties) { - if(properties != null){ - return (ArrayList) properties.entrySet().stream() - .map(Property::new) - .collect(Collectors.toList()); - } - return null; - } - - public boolean isHeatSourceType(){ - if(properties == null){ - return false; - } - Optional sourceType = properties.stream() - .filter(p -> p.getName().equals(ToscaElementNames.SOURCE_TYPE.getName())) - .findFirst(); - if(!sourceType.isPresent()){ - return false; - } - return sourceType.get().getValue() != null && ((String)sourceType.get().getValue()).equals(HEAT); - } - } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java 
b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java index 106fe94..5d3ecb4 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,12 +20,6 @@ package org.onap.sdc.toscaparser.api.parameters; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.stream.Collectors; - import org.onap.sdc.toscaparser.api.DataEntity; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.elements.EntityType; @@ -34,171 +28,172 @@ import org.onap.sdc.toscaparser.api.elements.constraints.Schema; import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.stream.Collectors; + public class Input { - - private static final String TYPE = "type"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static final String REQUIRED = "required"; - private static final String STATUS = "status"; - private static final String ENTRY_SCHEMA = "entry_schema"; - - public static final String INTEGER = "integer"; - public static final String STRING = "string"; - public static final String BOOLEAN = "boolean"; - public static final String FLOAT = "float"; - public static final String LIST = "list"; - public static final String MAP = "map"; - public static final String JSON = "json"; - - private static String INPUTFIELD[] = { - TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED,STATUS, ENTRY_SCHEMA + + private static final String TYPE = "type"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String REQUIRED = "required"; + private static final String STATUS = "status"; + private static final String ENTRY_SCHEMA = "entry_schema"; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final String FLOAT = "float"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String JSON = "json"; + + private static String[] inputField = { + TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, ENTRY_SCHEMA }; - - private static String PRIMITIVE_TYPES[] = { - INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON + + private static String[] primitiveTypes = { + INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON }; - + private String name; private Schema schema; - private LinkedHashMap customDefs; - private Map annotations; - - public Input(){ - /** - * Added to support Input serialization - */ - } - - public Input(String 
_name,LinkedHashMap _schemaDict,LinkedHashMap _customDefs) { - name = _name; - schema = new Schema(_name,_schemaDict); - customDefs = _customDefs; - } - - @SuppressWarnings("unchecked") - public void parseAnnotations() { - if(schema.getSchema() != null){ - LinkedHashMap annotations = (LinkedHashMap) schema.getSchema().get(ToscaElementNames.ANNOTATIONS.getName()); - if(annotations != null){ - setAnnotations(annotations.entrySet().stream() - .map(Annotation::new) - .filter(Annotation::isHeatSourceType) - .collect(Collectors.toMap(a -> a.getName(), a -> a))); - } - } - } - - public String getName() { - return name; - } - - public String getType() { - return schema.getType(); - } - - public String getDescription() { - return schema.getDescription(); - } - - public boolean isRequired() { - return schema.isRequired(); - } - - public Object getDefault() { - return schema.getDefault(); - } - - public ArrayList getConstraints() { - return schema.getConstraints(); - } + private LinkedHashMap customDefs; + private Map annotations; + + public Input() { + } + + public Input(String name, LinkedHashMap schema, LinkedHashMap customDefinitions) { + this.name = name; + this.schema = new Schema(name, schema); + customDefs = customDefinitions; + } + + @SuppressWarnings("unchecked") + public void parseAnnotations() { + if (schema.getSchema() != null) { + LinkedHashMap annotations = (LinkedHashMap) schema.getSchema().get(ToscaElementNames.ANNOTATIONS.getName()); + if (annotations != null) { + setAnnotations(annotations.entrySet().stream() + .map(Annotation::new) + .filter(Annotation::isHeatSourceType) + .collect(Collectors.toMap(Annotation::getName, a -> a))); + } + } + } + + public String getName() { + return name; + } + + public String getType() { + return schema.getType(); + } + + public String getDescription() { + return schema.getDescription(); + } + + public boolean isRequired() { + return schema.isRequired(); + } + + public Object getDefault() { + return schema.getDefault(); + } + + public ArrayList getConstraints() { + return schema.getConstraints(); + } public void validate(Object value) { - _validateField(); - _validateType(getType()); - if(value != null) { - _validateValue(value); + validateField(); + validateType(getType()); + if (value != null) { + validateValue(value); } } - private void _validateField() { - for(String key: schema.getSchema().keySet()) { - boolean bFound = false; - for(String ifld: INPUTFIELD) { - if(key.equals(ifld)) { - bFound = true; - break; - } - } - if(!bFound) { + private void validateField() { + for (String key : schema.getSchema().keySet()) { + boolean bFound = false; + for (String ifld : inputField) { + if (key.equals(ifld)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE214", String.format( - "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", - name,key))); - } - } - } - - private void _validateType(String inputType) { - boolean bFound = false; - for(String pt: Schema.PROPERTY_TYPES) { - if(pt.equals(inputType)) { - bFound = true; - break; - } - } - - if(!bFound) { - if(customDefs.get(inputType) != null) { - bFound = true; - } - } - - if(!bFound) { + "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + private void validateType(String inputType) { + boolean bFound = false; + for (String pt : Schema.PROPERTY_TYPES) { + if (pt.equals(inputType)) { + bFound = true; + break; + } + } + + if (!bFound) { + if 
(customDefs.get(inputType) != null) { + bFound = true; + } + } + + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE215", String.format( - "ValueError: Invalid type \"%s\"",inputType))); - } + "ValueError: Invalid type \"%s\"", inputType))); + } } - + @SuppressWarnings("unchecked") - private void _validateValue(Object value) { - Object datatype = null; - if(EntityType.TOSCA_DEF.get(getType()) != null) { - datatype = EntityType.TOSCA_DEF.get(getType()); - } - else if(EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) { - datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()); - } - - String type = getType(); - // if it's one of the basic types DON'T look in customDefs - if(Arrays.asList(PRIMITIVE_TYPES).contains(type)) { - DataEntity.validateDatatype(getType(), value, null, customDefs, null); - return; - } - else if(customDefs.get(getType()) != null) { - datatype = customDefs.get(getType()); - DataEntity.validateDatatype(getType(), value, (LinkedHashMap)datatype, customDefs, null); - return; - } - - DataEntity.validateDatatype(getType(), value, null, customDefs, null); - } - - public Map getAnnotations() { - return annotations; - } - - private void setAnnotations(Map annotations) { - this.annotations = annotations; - } - - public void resetAnnotaions(){ - annotations = null; - } - - public LinkedHashMap getEntrySchema() { - return schema.getEntrySchema(); - } + private void validateValue(Object value) { + Object datatype; + if (EntityType.TOSCA_DEF.get(getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(getType()); + } else if (EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()); + } + + String type = getType(); + // if it's one of the basic types DON'T look in customDefs + if (Arrays.asList(primitiveTypes).contains(type)) { + DataEntity.validateDatatype(getType(), value, null, customDefs, null); + return; + } else if (customDefs.get(getType()) != null) { + datatype = customDefs.get(getType()); + DataEntity.validateDatatype(getType(), value, (LinkedHashMap) datatype, customDefs, null); + return; + } + + DataEntity.validateDatatype(getType(), value, null, customDefs, null); + } + + public Map getAnnotations() { + return annotations; + } + + private void setAnnotations(Map annotations) { + this.annotations = annotations; + } + + public void resetAnnotaions() { + annotations = null; + } + + public LinkedHashMap getEntrySchema() { + return schema.getEntrySchema(); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java index df122f0..8ef82b3 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
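[Reviewer note, not part of the patch] A minimal sketch of the renamed Input constructor and getters touched in the hunk above. It only exercises the happy path, assuming Schema accepts a plain type/description/default map as the getters suggest, so no thread-local validation collector is needed; the input name, type and default are illustrative only:

    import java.util.LinkedHashMap;
    import org.onap.sdc.toscaparser.api.parameters.Input;

    public class InputSketch {
        public static void main(String[] args) {
            // Schema dictionary as it would appear under "inputs:" in a template.
            LinkedHashMap<String, Object> schemaDef = new LinkedHashMap<>();
            schemaDef.put("type", "string");                        // one of Schema.PROPERTY_TYPES
            schemaDef.put("description", "Name of the web server");
            schemaDef.put("default", "my-server");

            Input input = new Input("server_name", schemaDef, new LinkedHashMap<>());
            System.out.println(input.getName() + ": " + input.getType()); // server_name: string
            System.out.println(input.getDefault());                       // my-server
            System.out.println(input.getDescription());                   // Name of the web server
        }
    }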
@@ -21,100 +21,99 @@ package org.onap.sdc.toscaparser.api.parameters; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class Output { - - private static final String DESCRIPTION = "description"; - public static final String VALUE = "value"; - private static final String OUTPUTFIELD[] = {DESCRIPTION, VALUE}; - - private String name; - private LinkedHashMap attrs;//TYPE??? - - public Output(String oname,LinkedHashMap oattrs) { - name = oname; - attrs = oattrs; - } - - public String getDescription() { - return (String)attrs.get(DESCRIPTION); - } - - public Object getValue() { - return attrs.get(VALUE); - } - - public void validate() { - _validateField(); - } - - private void _validateField() { - if(!(attrs instanceof LinkedHashMap)) { - //TODO wrong error message... + + private static final String DESCRIPTION = "description"; + public static final String VALUE = "value"; + private static final String[] OUTPUT_FIELD = {DESCRIPTION, VALUE}; + + private String name; + private LinkedHashMap attributes; + + public Output(String name, LinkedHashMap attributes) { + this.name = name; + this.attributes = attributes; + } + + public String getDescription() { + return (String) attributes.get(DESCRIPTION); + } + + public Object getValue() { + return attributes.get(VALUE); + } + + public void validate() { + validateField(); + } + + private void validateField() { + if (attributes == null) { + //TODO wrong error message... ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE216", String.format( "ValidationError: Output \"%s\" has wrong type. Expecting a dict", - name))); - } - - if(getValue() == null) { + name))); + } + + if (getValue() == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE217", String.format( "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"", - name,VALUE))); - } - for(String key: attrs.keySet()) { - boolean bFound = false; - for(String of: OUTPUTFIELD) { - if(key.equals(of)) { - bFound = true; - break; - } - } - if(!bFound) { + name, VALUE))); + } + for (String key : attributes.keySet()) { + boolean bFound = false; + for (String of : OUTPUT_FIELD) { + if (key.equals(of)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE218", String.format( - "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", - name,key))); + "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", + name, key))); } } - } - - // getter/setter - - public String getName() { - return name; - } - - public void setAttr(String name,Object value) { - attrs.put(name, value); - } + } + + // getter/setter + + public String getName() { + return name; + } + + public void setAttr(String name, Object value) { + attributes.put(name, value); + } } /*python class Output(object): - OUTPUTFIELD = (DESCRIPTION, VALUE) = ('description', 'value') + OUTPUT_FIELD = (DESCRIPTION, VALUE) = ('description', 'value') - def __init__(self, name, attrs): + def __init__(self, name, attributes): self.name = name - self.attrs = attrs + self.attributes = attributes @property def description(self): - return self.attrs.get(self.DESCRIPTION) + return self.attributes.get(self.DESCRIPTION) @property def value(self): - return self.attrs.get(self.VALUE) + return 
self.attributes.get(self.VALUE) def validate(self): self._validate_field() def _validate_field(self): - if not isinstance(self.attrs, dict): + if not isinstance(self.attributes, dict): ValidationIssueCollector.appendException( MissingRequiredFieldError(what='Output "%s"' % self.name, required=self.VALUE)) @@ -122,8 +121,8 @@ class Output(object): ValidationIssueCollector.appendException( MissingRequiredFieldError(what='Output "%s"' % self.name, required=self.VALUE)) - for name in self.attrs: - if name not in self.OUTPUTFIELD: + for name in self.attributes: + if name not in self.OUTPUT_FIELD: ValidationIssueCollector.appendException( UnknownFieldError(what='Output "%s"' % self.name, field=name)) diff --git a/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java index 92d5194..4ada267 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -50,204 +50,199 @@ import org.yaml.snakeyaml.Yaml; public class CSAR { - private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); - private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); + private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); + private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); - private String path; + private String path; private boolean isFile; private boolean isValidated; private boolean errorCaught; private String csar; private String tempDir; -// private Metadata metaData; + // private Metadata metaData; private File tempFile; - private LinkedHashMap> metaProperties; + private LinkedHashMap> metaProperties; - public CSAR(String csarPath, boolean aFile) { - path = csarPath; - isFile = aFile; + public CSAR(String csarPath, boolean aFile) { + path = csarPath; + isFile = aFile; isValidated = false; errorCaught = false; csar = null; tempDir = null; tempFile = null; - metaProperties = new LinkedHashMap<>(); - } + metaProperties = new LinkedHashMap<>(); + } + + public boolean validate() throws JToscaException { + isValidated = true; - public boolean validate() throws JToscaException { - isValidated = true; - //validate that the file or URL exists - - if(isFile) { - File f = new File(path); - if (!f.isFile()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path))); - return false; - } - else { - this.csar = path; - } - } - else { - if(!UrlUtils.validateUrl(path)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist",path))); - return false; - } - // get it to a local file - try { - File tempFile = File.createTempFile("csartmp",".csar"); - Path ptf = Paths.get(tempFile.getPath()); - URL webfile = new URL(path); - InputStream in = webfile.openStream(); - 
Files.copy(in,ptf,StandardCopyOption.REPLACE_EXISTING); - } - catch(Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path)); - return false; - } - - log.debug("CSAR - validate - currently only files are supported"); - return false; - } - - _parseAndValidateMetaProperties(); - - if(errorCaught) { - return false; - } - + + if (isFile) { + File f = new File(path); + if (!f.isFile()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path))); + return false; + } else { + this.csar = path; + } + } else { + if (!UrlUtils.validateUrl(path)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist", path))); + return false; + } + // get it to a local file + try { + File tempFile = File.createTempFile("csartmp", ".csar"); + Path ptf = Paths.get(tempFile.getPath()); + URL webfile = new URL(path); + InputStream in = webfile.openStream(); + Files.copy(in, ptf, StandardCopyOption.REPLACE_EXISTING); + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path)); + return false; + } + + log.debug("CSAR - validate - currently only files are supported"); + return false; + } + + _parseAndValidateMetaProperties(); + + if (errorCaught) { + return false; + } + // validate that external references in the main template actually exist and are accessible _validateExternalReferences(); - + return !errorCaught; - } - - private void _parseAndValidateMetaProperties() throws JToscaException { - - ZipFile zf = null; - - try { - - // validate that it is a valid zip file - RandomAccessFile raf = new RandomAccessFile(csar, "r"); - long n = raf.readInt(); - raf.close(); - // check if Zip's magic number - if (n != 0x504B0304) { - String errorString = String.format("\"%s\" is not a valid zip file", csar); - log.error(errorString); - throw new JToscaException(errorString , JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue()); - } - - // validate that it contains the metadata file in the correct location - zf = new ZipFile(csar); - ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); - if (ze == null) { - - String errorString = String.format( - "\"%s\" is not a valid CSAR as it does not contain the " + - "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue()); - } - - //Going over expected metadata files and parsing them - for (String metaFile: META_PROPERTIES_FILES) { - - byte ba[] = new byte[4096]; - ze = zf.getEntry(metaFile); - if (ze != null) { - InputStream inputStream = zf.getInputStream(ze); - n = inputStream.read(ba, 0, 4096); - String md = new String(ba); - md = md.substring(0, (int) n); - - String errorString = String.format( - "The file \"%s\" in the" + - " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar); - - try { - Yaml yaml = new Yaml(); - Object mdo = yaml.load(md); - if (!(mdo instanceof LinkedHashMap)) { - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); - } - - String[] split = ze.getName().split("/"); - String fileName = split[split.length - 1]; - - if (!metaProperties.containsKey(fileName)) { - 
metaProperties.put(fileName, (LinkedHashMap) mdo); - } - } - catch(Exception e) { - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); - } - } - } - - // verify it has "Entry-Definition" - String edf = _getMetadata("Entry-Definitions"); - if (edf == null) { - String errorString = String.format( - "The CSAR \"%s\" is missing the required metadata " + - "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue()); - } - - //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR - boolean foundEDF = false; - Enumeration entries = zf.entries(); - while (entries.hasMoreElements()) { - ze = entries.nextElement(); - if (ze.getName().equals(edf)) { - foundEDF = true; - break; - } - } - if (!foundEDF) { - String errorString = String.format( - "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue()); - } - } catch (JToscaException e) { - //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); - throw e; - } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage())); - errorCaught = true; - } - - try { - if (zf != null) { - zf.close(); - } - } catch (IOException e) { - } - } - - public void cleanup() { - try { - if(tempFile != null) { - tempFile.delete(); - } - } - catch(Exception e) { - } - } - + } + + private void _parseAndValidateMetaProperties() throws JToscaException { + + ZipFile zf = null; + + try { + + // validate that it is a valid zip file + RandomAccessFile raf = new RandomAccessFile(csar, "r"); + long n = raf.readInt(); + raf.close(); + // check if Zip's magic number + if (n != 0x504B0304) { + String errorString = String.format("\"%s\" is not a valid zip file", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue()); + } + + // validate that it contains the metadata file in the correct location + zf = new ZipFile(csar); + ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); + if (ze == null) { + + String errorString = String.format( + "\"%s\" is not a valid CSAR as it does not contain the " + + "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue()); + } + + //Going over expected metadata files and parsing them + for (String metaFile : META_PROPERTIES_FILES) { + + byte ba[] = new byte[4096]; + ze = zf.getEntry(metaFile); + if (ze != null) { + InputStream inputStream = zf.getInputStream(ze); + n = inputStream.read(ba, 0, 4096); + String md = new String(ba); + md = md.substring(0, (int) n); + + String errorString = String.format( + "The file \"%s\" in the" + + " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar); + + try { + Yaml yaml = new Yaml(); + Object mdo = yaml.load(md); + if (!(mdo instanceof LinkedHashMap)) { + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + + String[] split = ze.getName().split("/"); + String fileName = split[split.length - 1]; + + if (!metaProperties.containsKey(fileName)) { + 
metaProperties.put(fileName, (LinkedHashMap) mdo); + } + } catch (Exception e) { + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + } + } + + // verify it has "Entry-Definition" + String edf = _getMetadata("Entry-Definitions"); + if (edf == null) { + String errorString = String.format( + "The CSAR \"%s\" is missing the required metadata " + + "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue()); + } + + //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR + boolean foundEDF = false; + Enumeration entries = zf.entries(); + while (entries.hasMoreElements()) { + ze = entries.nextElement(); + if (ze.getName().equals(edf)) { + foundEDF = true; + break; + } + } + if (!foundEDF) { + String errorString = String.format( + "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue()); + } + } catch (JToscaException e) { + //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); + throw e; + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage())); + errorCaught = true; + } + + try { + if (zf != null) { + zf.close(); + } + } catch (IOException e) { + } + } + + public void cleanup() { + try { + if (tempFile != null) { + tempFile.delete(); + } + } catch (Exception e) { + } + } + private String _getMetadata(String key) throws JToscaException { - if(!isValidated) { - validate(); - } - Object value = _getMetaProperty("TOSCA.meta").get(key); - return value != null ? value.toString() : null; + if (!isValidated) { + validate(); + } + Object value = _getMetaProperty("TOSCA.meta").get(key); + return value != null ? 
value.toString() : null; } public String getAuthor() throws JToscaException { @@ -258,276 +253,266 @@ public class CSAR { return _getMetadata("CSAR-Version"); } - public LinkedHashMap> getMetaProperties() { - return metaProperties; - } - - private LinkedHashMap _getMetaProperty(String propertiesFile) { - return metaProperties.get(propertiesFile); - } - - public String getMainTemplate() throws JToscaException { - String entryDef = _getMetadata("Entry-Definitions"); - ZipFile zf; - boolean ok = false; - try { - zf = new ZipFile(path); - ok = (zf.getEntry(entryDef) != null); - zf.close(); - } - catch(IOException e) { - if(!ok) { - log.error("CSAR - getMainTemplate - failed to open {}", path); - } - } - if(ok) { - return entryDef; - } - else { - return null; - } + public LinkedHashMap> getMetaProperties() { + return metaProperties; + } + + private LinkedHashMap _getMetaProperty(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + + public String getMainTemplate() throws JToscaException { + String entryDef = _getMetadata("Entry-Definitions"); + ZipFile zf; + boolean ok = false; + try { + zf = new ZipFile(path); + ok = (zf.getEntry(entryDef) != null); + zf.close(); + } catch (IOException e) { + if (!ok) { + log.error("CSAR - getMainTemplate - failed to open {}", path); + } + } + if (ok) { + return entryDef; + } else { + return null; + } } - @SuppressWarnings("unchecked") - public LinkedHashMap getMainTemplateYaml() throws JToscaException { - String mainTemplate = tempDir + File.separator + getMainTemplate(); - if(mainTemplate != null) { - try (InputStream input = new FileInputStream(new File(mainTemplate));){ - Yaml yaml = new Yaml(); - Object data = yaml.load(input); - if(!(data instanceof LinkedHashMap)) { - throw new IOException(); - } - return (LinkedHashMap)data; - } - catch(Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format( - "The file \"%s\" in the CSAR \"%s\" does not " + - "contain valid TOSCA YAML content", - mainTemplate,csar))); - } - } - return null; + @SuppressWarnings("unchecked") + public LinkedHashMap getMainTemplateYaml() throws JToscaException { + String mainTemplate = tempDir + File.separator + getMainTemplate(); + if (mainTemplate != null) { + try (InputStream input = new FileInputStream(new File(mainTemplate));) { + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + if (!(data instanceof LinkedHashMap)) { + throw new IOException(); + } + return (LinkedHashMap) data; + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format( + "The file \"%s\" in the CSAR \"%s\" does not " + + "contain valid TOSCA YAML content", + mainTemplate, csar))); + } + } + return null; } - + public String getDescription() throws JToscaException { String desc = _getMetadata("Description"); - if(desc != null) { + if (desc != null) { return desc; } - Map metaData = metaProperties.get("TOSCA.meta"); - metaData.put("Description", getMainTemplateYaml().get("description")); - return _getMetadata("Description"); + Map metaData = metaProperties.get("TOSCA.meta"); + metaData.put("Description", getMainTemplateYaml().get("description")); + return _getMetadata("Description"); } public String getTempDir() { - return tempDir; + return tempDir; } - + public void decompress() throws IOException, JToscaException { - if(!isValidated) { + if (!isValidated) { validate(); } - - if(tempDir == null || tempDir.isEmpty()) { - tempDir = 
Files.createTempDirectory("JTP").toString(); - unzip(path,tempDir); + + if (tempDir == null || tempDir.isEmpty()) { + tempDir = Files.createTempDirectory("JTP").toString(); + unzip(path, tempDir); } } - - private void _validateExternalReferences() throws JToscaException { + + private void _validateExternalReferences() throws JToscaException { // Extracts files referenced in the main template - // These references are currently supported: + // These references are currently supported: // * imports // * interface implementations // * artifacts try { decompress(); String mainTplFile = getMainTemplate(); - if(mainTplFile == null) { + if (mainTplFile == null) { return; } - - LinkedHashMap mainTpl = getMainTemplateYaml(); - if(mainTpl.get("imports") != null) { - // this loads the imports - ImportsLoader il = new ImportsLoader((ArrayList)mainTpl.get("imports"), - tempDir + File.separator + mainTplFile, - (Object)null, - (LinkedHashMap)null); + + LinkedHashMap mainTpl = getMainTemplateYaml(); + if (mainTpl.get("imports") != null) { + // this loads the imports + ImportsLoader il = new ImportsLoader((ArrayList) mainTpl.get("imports"), + tempDir + File.separator + mainTplFile, + (Object) null, + (LinkedHashMap) null); } - - if(mainTpl.get("topology_template") != null) { - LinkedHashMap topologyTemplate = - (LinkedHashMap)mainTpl.get("topology_template"); - - if(topologyTemplate.get("node_templates") != null) { - LinkedHashMap nodeTemplates = - (LinkedHashMap)topologyTemplate.get("node_templates"); - for(String nodeTemplateKey: nodeTemplates.keySet()) { - LinkedHashMap nodeTemplate = - (LinkedHashMap)nodeTemplates.get(nodeTemplateKey); - if(nodeTemplate.get("artifacts") != null) { - LinkedHashMap artifacts = - (LinkedHashMap)nodeTemplate.get("artifacts"); - for(String artifactKey: artifacts.keySet()) { - Object artifact = artifacts.get(artifactKey); - if(artifact instanceof String) { - _validateExternalReference(mainTplFile,(String)artifact,true); - } - else if(artifact instanceof LinkedHashMap) { - String file = (String)((LinkedHashMap)artifact).get("file"); - if(file != null) { - _validateExternalReference(mainTplFile,file,true); - } - } - else { + + if (mainTpl.get("topology_template") != null) { + LinkedHashMap topologyTemplate = + (LinkedHashMap) mainTpl.get("topology_template"); + + if (topologyTemplate.get("node_templates") != null) { + LinkedHashMap nodeTemplates = + (LinkedHashMap) topologyTemplate.get("node_templates"); + for (String nodeTemplateKey : nodeTemplates.keySet()) { + LinkedHashMap nodeTemplate = + (LinkedHashMap) nodeTemplates.get(nodeTemplateKey); + if (nodeTemplate.get("artifacts") != null) { + LinkedHashMap artifacts = + (LinkedHashMap) nodeTemplate.get("artifacts"); + for (String artifactKey : artifacts.keySet()) { + Object artifact = artifacts.get(artifactKey); + if (artifact instanceof String) { + _validateExternalReference(mainTplFile, (String) artifact, true); + } else if (artifact instanceof LinkedHashMap) { + String file = (String) ((LinkedHashMap) artifact).get("file"); + if (file != null) { + _validateExternalReference(mainTplFile, file, true); + } + } else { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE225", String.format( - "ValueError: Unexpected artifact definition for \"%s\"", - artifactKey))); - errorCaught = true; - } - } - } - if(nodeTemplate.get("interfaces") != null) { - LinkedHashMap interfaces = - (LinkedHashMap)nodeTemplate.get("interfaces"); - for(String interfaceKey: interfaces.keySet()) { - LinkedHashMap 
_interface = - (LinkedHashMap)interfaces.get(interfaceKey); - for(String operationKey: _interface.keySet()) { - Object operation = _interface.get(operationKey); - if(operation instanceof String) { - _validateExternalReference(mainTplFile,(String)operation,false); - } - else if(operation instanceof LinkedHashMap) { - String imp = (String)((LinkedHashMap)operation).get("implementation"); - if(imp != null) { - _validateExternalReference(mainTplFile,imp,true); - } - } - } - } - } - } - } + "ValueError: Unexpected artifact definition for \"%s\"", + artifactKey))); + errorCaught = true; + } + } + } + if (nodeTemplate.get("interfaces") != null) { + LinkedHashMap interfaces = + (LinkedHashMap) nodeTemplate.get("interfaces"); + for (String interfaceKey : interfaces.keySet()) { + LinkedHashMap _interface = + (LinkedHashMap) interfaces.get(interfaceKey); + for (String operationKey : _interface.keySet()) { + Object operation = _interface.get(operationKey); + if (operation instanceof String) { + _validateExternalReference(mainTplFile, (String) operation, false); + } else if (operation instanceof LinkedHashMap) { + String imp = (String) ((LinkedHashMap) operation).get("implementation"); + if (imp != null) { + _validateExternalReference(mainTplFile, imp, true); + } + } + } + } + } + } + } } + } catch (IOException e) { + errorCaught = true; + } finally { + // delete tempDir (only here?!?) + File fdir = new File(tempDir); + deleteDir(fdir); + tempDir = null; } - catch(IOException e) { - errorCaught = true; - } - finally { - // delete tempDir (only here?!?) - File fdir = new File(tempDir); - deleteDir(fdir); - tempDir = null; + } + + public static void deleteDir(File fdir) { + try { + if (fdir.isDirectory()) { + for (File c : fdir.listFiles()) + deleteDir(c); + } + fdir.delete(); + } catch (Exception e) { } - } - - public static void deleteDir(File fdir) { - try { - if (fdir.isDirectory()) { - for (File c : fdir.listFiles()) - deleteDir(c); - } - fdir.delete(); - } - catch(Exception e) { - } - } - - private void _validateExternalReference(String tplFile,String resourceFile,boolean raiseExc) { + } + + private void _validateExternalReference(String tplFile, String resourceFile, boolean raiseExc) { // Verify that the external resource exists // If resource_file is a URL verify that the URL is valid. // If resource_file is a relative path verify that the path is valid // considering base folder (self.temp_dir) and tpl_file. // Note that in a CSAR resource_file cannot be an absolute path. 
- if(UrlUtils.validateUrl(resourceFile)) { - String msg = String.format("URLException: The resource at \"%s\" cannot be accessed",resourceFile); + if (UrlUtils.validateUrl(resourceFile)) { + String msg = String.format("URLException: The resource at \"%s\" cannot be accessed", resourceFile); try { - if(UrlUtils.isUrlAccessible(resourceFile)) { + if (UrlUtils.isUrlAccessible(resourceFile)) { return; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg)); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg)); errorCaught = true; } - } - catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg)); + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg)); } } - String dirPath = Paths.get(tplFile).getParent().toString(); - String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; - File f = new File(filePath); - if(f.isFile()) { - return; - } - - if(raiseExc) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format( - "ValueError: The resource \"%s\" does not exist",resourceFile))); - } - errorCaught = true; - } - + String dirPath = Paths.get(tplFile).getParent().toString(); + String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; + File f = new File(filePath); + if (f.isFile()) { + return; + } + + if (raiseExc) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format( + "ValueError: The resource \"%s\" does not exist", resourceFile))); + } + errorCaught = true; + } + private void unzip(String zipFilePath, String destDirectory) throws IOException { File destDir = new File(destDirectory); if (!destDir.exists()) { destDir.mkdir(); } - try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));){ - ZipEntry entry = zipIn.getNextEntry(); - // iterates over entries in the zip file - while (entry != null) { - // create all directories needed for nested items - String[] parts = entry.getName().split("/"); - String s = destDirectory + File.separator ; - for(int i=0; i< parts.length-1; i++) { - s += parts[i]; - File idir = new File(s); - if(!idir.exists()) { - idir.mkdir(); - } - s += File.separator; - } - String filePath = destDirectory + File.separator + entry.getName(); - if (!entry.isDirectory()) { - // if the entry is a file, extracts it - extractFile(zipIn, filePath); - } else { - // if the entry is a directory, make the directory - File dir = new File(filePath); - dir.mkdir(); - } - zipIn.closeEntry(); - entry = zipIn.getNextEntry(); - } - } + try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));) { + ZipEntry entry = zipIn.getNextEntry(); + // iterates over entries in the zip file + while (entry != null) { + // create all directories needed for nested items + String[] parts = entry.getName().split("/"); + String s = destDirectory + File.separator; + for (int i = 0; i < parts.length - 1; i++) { + s += parts[i]; + File idir = new File(s); + if (!idir.exists()) { + idir.mkdir(); + } + s += File.separator; + } + String filePath = destDirectory + File.separator + entry.getName(); + if (!entry.isDirectory()) { + // if the entry is a file, extracts it + extractFile(zipIn, filePath); + } else { + // if the entry is a directory, make the directory + File dir 
= new File(filePath); + dir.mkdir(); + } + zipIn.closeEntry(); + entry = zipIn.getNextEntry(); + } + } } - + /** * Extracts a zip entry (file entry) + * * @param zipIn * @param filePath * @throws IOException */ private static final int BUFFER_SIZE = 4096; - + private void extractFile(ZipInputStream zipIn, String filePath) throws IOException { //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); - try (FileOutputStream fos = new FileOutputStream(filePath); - BufferedOutputStream bos = new BufferedOutputStream(fos);){ - byte[] bytesIn = new byte[BUFFER_SIZE]; - int read = 0; - while ((read = zipIn.read(bytesIn)) != -1) { - bos.write(bytesIn, 0, read); - } - } + try (FileOutputStream fos = new FileOutputStream(filePath); + BufferedOutputStream bos = new BufferedOutputStream(fos);) { + byte[] bytesIn = new byte[BUFFER_SIZE]; + int read = 0; + while ((read = zipIn.read(bytesIn)) != -1) { + bos.write(bytesIn, 0, read); + } + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java index a15afe4..237b738 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,24 +26,25 @@ import java.util.Map; public class CopyUtils { + private CopyUtils() { + } + @SuppressWarnings("unchecked") - public static Object copyLhmOrAl(Object src) { - if(src instanceof LinkedHashMap) { - LinkedHashMap dst = new LinkedHashMap(); - for(Map.Entry me: ((LinkedHashMap)src).entrySet()) { - dst.put(me.getKey(),me.getValue()); - } - return dst; - } - else if(src instanceof ArrayList) { - ArrayList dst = new ArrayList(); - for(Object o: (ArrayList)src) { - dst.add(o); - } - return dst; - } - else { - return null; - } + public static Object copyLhmOrAl(Object src) { + if (src instanceof LinkedHashMap) { + LinkedHashMap dst = new LinkedHashMap(); + for (Map.Entry me : ((LinkedHashMap) src).entrySet()) { + dst.put(me.getKey(), me.getValue()); + } + return dst; + } else if (src instanceof ArrayList) { + ArrayList dst = new ArrayList(); + for (Object o : (ArrayList) src) { + dst.add(o); + } + return dst; + } else { + return null; + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java index d87103b..158a3e1 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
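The reworked CopyUtils above keeps the same shallow-copy semantics behind a private constructor. A minimal usage sketch of copyLhmOrAl as shown in the hunk: top-level entries are copied into a fresh container, nested values stay shared with the source, and unsupported input types yield null.

import java.util.LinkedHashMap;
import org.onap.sdc.toscaparser.api.utils.CopyUtils;

public class CopyUtilsSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> src = new LinkedHashMap<>();
        src.put("type", "string");
        src.put("required", true);

        // copyLhmOrAl returns a new LinkedHashMap (or ArrayList) with the
        // same entries; the copy is shallow, so nested structures are shared.
        @SuppressWarnings("unchecked")
        LinkedHashMap<String, Object> copy =
                (LinkedHashMap<String, Object>) CopyUtils.copyLhmOrAl(src);

        copy.put("required", false);
        System.out.println(src.get("required"));                       // true - top level is independent
        System.out.println(CopyUtils.copyLhmOrAl("not a map or list")); // null
    }
}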
@@ -25,51 +25,44 @@ import java.util.LinkedHashMap; import java.util.Map; public class DumpUtils { - - @SuppressWarnings("unchecked") - public static void dumpYaml(Object yo,int level) { - final String indent = " "; - try { - if(yo == null) { - System.out.println(""); - return; - } - String cname = yo.getClass().getSimpleName(); - System.out.print(cname); - if(cname.equals("LinkedHashMap")) { - LinkedHashMap lhm = (LinkedHashMap)yo; - System.out.println(); - for(Map.Entry me: lhm.entrySet()) { - System.out.print(indent.substring(0,level) + me.getKey() + ": "); - dumpYaml(me.getValue(),level+2); - } - } - else if(cname.equals("ArrayList")) { - ArrayList al = (ArrayList)yo; - System.out.println(); - for (int i=0; i \"" + (String)yo + "\""); - } - else if(cname.equals("Integer")) { - System.out.println(" ==> " + (int)yo); - } - else if(cname.equals("Boolean")) { - System.out.println(" ==> " + (boolean)yo); - } - else if(cname.equals("Double")) { - System.out.println(" ==> " + (double)yo); - } - else { - System.out.println(" !! unexpected type"); - } - } - catch(Exception e) { - System.out.println("Exception!! " + e.getMessage()); - } - } + + @SuppressWarnings("unchecked") + private static void dumpYaml(Object yo, int level) { + final String indent = " "; + try { + if (yo == null) { + System.out.println(""); + return; + } + String cname = yo.getClass().getSimpleName(); + System.out.print(cname); + if (cname.equals("LinkedHashMap")) { + LinkedHashMap lhm = (LinkedHashMap) yo; + System.out.println(); + for (Map.Entry me : lhm.entrySet()) { + System.out.print(indent.substring(0, level) + me.getKey() + ": "); + dumpYaml(me.getValue(), level + 2); + } + } else if (cname.equals("ArrayList")) { + ArrayList al = (ArrayList) yo; + System.out.println(); + for (int i = 0; i < al.size(); i++) { + System.out.format("%s[%d] ", indent.substring(0, level), i); + dumpYaml(al.get(i), level + 2); + } + } else if (cname.equals("String")) { + System.out.println(" ==> \"" + (String) yo + "\""); + } else if (cname.equals("Integer")) { + System.out.println(" ==> " + (int) yo); + } else if (cname.equals("Boolean")) { + System.out.println(" ==> " + (boolean) yo); + } else if (cname.equals("Double")) { + System.out.println(" ==> " + (double) yo); + } else { + System.out.println(" !! unexpected type"); + } + } catch (Exception e) { + System.out.println("Exception!! " + e.getMessage()); + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java index 3515ed0..3849ce0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
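dumpYaml() in the hunk above dispatches on the plain Java types SnakeYAML produces. A minimal sketch of those shapes, assuming SnakeYAML's default mapping of YAML mappings to LinkedHashMap and sequences to ArrayList, which is what the instanceof checks in CSAR and DumpUtils rely on.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import org.yaml.snakeyaml.Yaml;

public class YamlShapeSketch {
    public static void main(String[] args) {
        // SnakeYAML hands back the plain Java shapes that dumpYaml() walks:
        // LinkedHashMap for mappings, ArrayList for sequences, and
        // String/Integer/Boolean/Double for scalars.
        Yaml yaml = new Yaml();
        Object data = yaml.load("name: demo\nversion: 1\nflags: [a, b]");

        LinkedHashMap<?, ?> map = (LinkedHashMap<?, ?>) data;
        System.out.println(map.get("name"));                        // demo
        System.out.println(map.get("version") instanceof Integer);  // true
        System.out.println(map.get("flags") instanceof ArrayList);  // true
    }
}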
@@ -25,7 +25,7 @@ public enum JToscaErrorCodes { MISSING_META_FILE("JE1001"), INVALID_META_YAML_CONTENT("JE1002"), ENTRY_DEFINITION_NOT_DEFINED("JE1003"), - MISSING_ENTRY_DEFINITION_FILE ("JE1004"), + MISSING_ENTRY_DEFINITION_FILE("JE1004"), GENERAL_ERROR("JE1005"), PATH_NOT_VALID("JE1006"), CSAR_TOSCA_VALIDATION_ERROR("JE1007"), @@ -33,7 +33,7 @@ public enum JToscaErrorCodes { private String value; - private JToscaErrorCodes(String value) { + JToscaErrorCodes(String value) { this.value = value; } @@ -42,8 +42,8 @@ public enum JToscaErrorCodes { } public static JToscaErrorCodes getByCode(String code) { - for(JToscaErrorCodes v : values()){ - if( v.getValue().equals(code)){ + for (JToscaErrorCodes v : values()) { + if (v.getValue().equals(code)) { return v; } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java index 838fb07..a753d62 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,104 +25,111 @@ import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import java.util.regex.Matcher; import java.util.regex.Pattern; -public class TOSCAVersionProperty {// test with functions/test_concat.yaml - - private String version; - - private static final String versionRe = - "^(?([0-9][0-9]*))" + - "(\\.(?([0-9][0-9]*)))?" + - "(\\.(?([0-9][0-9]*)))?" + - "(\\.(?([0-9A-Za-z]+)))?" + - "(\\-(?[0-9])*)?$"; - - private String minorVersion = null; - private String majorVersion = null; - private String fixVersion = null; - private String qualifier = null; - private String buildVersion = null; - - - public TOSCAVersionProperty(Object _version) { - version = _version.toString(); - - if(version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) { - //log.warning(_('Version assumed as not provided')) - version = ""; +// test with functions/test_concat.yaml +public class TOSCAVersionProperty { + + private String version; + + private static final String VERSION_RE = + "^(?([0-9][0-9]*))" + + "(\\.(?([0-9][0-9]*)))?" + + "(\\.(?([0-9][0-9]*)))?" + + "(\\.(?([0-9A-Za-z]+)))?" 
+ + "(\\-(?[0-9])*)?$"; + + private String minorVersion = null; + private String majorVersion = null; + private String fixVersion = null; + private String qualifier = null; + private String buildVersion = null; + + + public TOSCAVersionProperty(String version) { + + if (version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) { return; } - Pattern pattern = Pattern.compile(versionRe); - Matcher matcher = pattern.matcher(version); - if(!matcher.find()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE252", String.format( - "InvalidTOSCAVersionPropertyException: " + - "Value of TOSCA version property \"%s\" is invalid", - version))); + Pattern pattern = Pattern.compile(VERSION_RE); + Matcher matcher = pattern.matcher(version); + if (!matcher.find()) { + ThreadLocalsHolder.getCollector().appendValidationIssue( + new JToscaValidationIssue( + "JE252", + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"" + version + "\" is invalid" + )); return; - } + } minorVersion = matcher.group("gMinorVersion"); majorVersion = matcher.group("gMajorVersion"); fixVersion = matcher.group("gFixVersion"); - qualifier = _validateQualifier(matcher.group("gQualifier")); - buildVersion = _validateBuild(matcher.group("gBuildVersion")); - _validateMajorVersion(majorVersion); - - } - - private String _validateMajorVersion(String value) { + qualifier = validateQualifier(matcher.group("gQualifier")); + buildVersion = validateBuild(matcher.group("gBuildVersion")); + validateMajorVersion(majorVersion); + + this.version = version; + + } + + private String validateMajorVersion(String value) { // Validate major version // Checks if only major version is provided and assumes // minor version as 0. // Eg: If version = 18, then it returns version = '18.0' - if(minorVersion == null && buildVersion == null && !value.equals("0")) { + if (minorVersion == null && buildVersion == null && !value.equals("0")) { //log.warning(_('Minor version assumed "0".')) version = version + "0"; } return value; - } - - private String _validateQualifier(String value) { - // Validate qualifier - - // TOSCA version is invalid if a qualifier is present without the - // fix version or with all of major, minor and fix version 0s. - - // For example, the following versions are invalid - // 18.0.abc - // 0.0.0.abc - - if((fixVersion == null && value != null) || - (minorVersion.equals("0") && majorVersion.equals("0") && - fixVersion.equals("0") && value != null)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE253", String.format( - "InvalidTOSCAVersionPropertyException: " + - "Value of TOSCA version property \"%s\" is invalid", - version))); - } - return value; - } - - private String _validateBuild(String value) { + } + + private String validateQualifier(String value) { + // Validate qualifier + + // TOSCA version is invalid if a qualifier is present without the + // fix version or with all of major, minor and fix version 0s. 
+ + // For example, the following versions are invalid + // 18.0.abc + // 0.0.0.abc + + if ((fixVersion == null && value != null) || (minorVersion.equals("0") && majorVersion.equals("0") + && fixVersion.equals("0") && value != null)) { + ThreadLocalsHolder.getCollector().appendValidationIssue( + new JToscaValidationIssue( + "JE253", + "InvalidTOSCAVersionPropertyException: Value of TOSCA version property \"" + + version + + "\" is invalid" + )); + } + return value; + } + + private String validateBuild(String value) { // Validate build version // TOSCA version is invalid if build version is present without the qualifier. // Eg: version = 18.0.0-1 is invalid. - if(qualifier == null && value != null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE254", String.format( - "InvalidTOSCAVersionPropertyException: " + - "Value of TOSCA version property \"%s\" is invalid", - version))); - } + if (qualifier == null && value != null) { + ThreadLocalsHolder.getCollector().appendValidationIssue( + new JToscaValidationIssue( + "JE254", + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"" + version + "\" is invalid" + ) + ); + } return value; } - public Object getVersion() { - return version; - } + public Object getVersion() { + return version; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java index 2ea8d08..4c4581b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,21 +24,22 @@ import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; public class ThreadLocalsHolder { - private static final ThreadLocal exceptionCollectorThreadLocal = new ThreadLocal<>(); + private static final ThreadLocal EXCEPTION_COLLECTOR_THREAD_LOCAL = new ThreadLocal<>(); - private ThreadLocalsHolder(){} + private ThreadLocalsHolder() { + } public static ValidationIssueCollector getCollector() { - return exceptionCollectorThreadLocal.get(); + return EXCEPTION_COLLECTOR_THREAD_LOCAL.get(); } public static void setCollector(ValidationIssueCollector validationIssueCollector) { cleanup(); - exceptionCollectorThreadLocal.set(validationIssueCollector); + EXCEPTION_COLLECTOR_THREAD_LOCAL.set(validationIssueCollector); } - public static void cleanup(){ - exceptionCollectorThreadLocal.remove(); + public static void cleanup() { + EXCEPTION_COLLECTOR_THREAD_LOCAL.remove(); } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java index 72e5122..d081d91 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,61 +28,63 @@ import java.net.MalformedURLException; import java.net.URL; public class UrlUtils { - - public static boolean validateUrl(String sUrl) { + + private static final int HTTP_STATUS_OK = 200; + + private UrlUtils() { + } + + public static boolean validateUrl(String sUrl) { // Validates whether the given path is a URL or not // If the given path includes a scheme (http, https, ftp, ...) and a net // location (a domain name such as www.github.com) it is validated as a URL - try { - URL url = new URL(sUrl); - if(url.getProtocol().equals("file")) { - return true; - } - return url.getAuthority() != null; - } - catch(MalformedURLException e) { - return false; - } - } - - public static String joinUrl(String sUrl,String relativePath) { + try { + URL url = new URL(sUrl); + if (url.getProtocol().equals("file")) { + return true; + } + return url.getAuthority() != null; + } catch (MalformedURLException e) { + return false; + } + } + + public static String joinUrl(String sUrl, String relativePath) { // Builds a new URL from the given URL and the relative path // Example: // url: http://www.githib.com/openstack/heat // relative_path: heat-translator // - joined: http://www.githib.com/openstack/heat-translator - if(!validateUrl(sUrl)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format( - "ValueError: The URL \"%s\" is malformed",sUrl))); - } - try { - URL base = new URL(sUrl); - return (new URL(base,relativePath)).toString(); - } - catch(MalformedURLException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format( - "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception",sUrl,relativePath))); - return sUrl; - } - } - - public static boolean isUrlAccessible(String sUrl) { + if (!validateUrl(sUrl)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format( + "ValueError: The URL \"%s\" is malformed", sUrl))); + } + try { + URL base = new URL(sUrl); + return (new URL(base, relativePath)).toString(); + } catch (MalformedURLException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format( + "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception", sUrl, relativePath))); + return sUrl; + } + } + + public static boolean isUrlAccessible(String sUrl) { // Validates whether the given URL is accessible // Returns true if the get call returns a 200 response code. // Otherwise, returns false. 
- try { - HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection(); - connection.setRequestMethod("HEAD"); - int responseCode = connection.getResponseCode(); - return responseCode == 200; - } - catch(IOException e) { - return false; - } - } + try { + HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection(); + connection.setRequestMethod("HEAD"); + int responseCode = connection.getResponseCode(); + return responseCode == HTTP_STATUS_OK; + } catch (IOException e) { + return false; + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java index a9786ae..b90d882 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,246 +27,241 @@ import java.util.Date; import java.util.LinkedHashMap; public class ValidateUtils { - - private static final String RANGE_UNBOUNDED = "UNBOUNDED"; - - public static Object strToNum(Object value) { - // Convert a string representation of a number into a numeric type - // TODO(TBD) we should not allow numeric values in, input should be str - if(value instanceof Number) { - return value; - } - if(!(value instanceof String)) { - - } - try { - return Integer.parseInt((String)value); - } - catch(NumberFormatException e) { - } - try { - return Float.parseFloat((String)value); - } - catch(Exception e) { - } - return null; - } - - public static Object validateNumeric(Object value) { - if(value != null) { - if (!(value instanceof Number)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format( - "ValueError: \"%s\" is not a numeric",value.toString()))); - } - } - return value; - } - - public static Object validateInteger(Object value) { - if(value != null) { - if (!(value instanceof Integer)) { - // allow "true" and "false" - if (value instanceof Boolean) { - return (Boolean) value ? 
1 : 0; - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format( - "ValueError: \"%s\" is not an integer",value.toString()))); - } - } - return value; - } - - public static Object validateFloat(Object value) { - if(value != null) { - if (!(value instanceof Float || value instanceof Double)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format( - "ValueError: \"%s\" is not a float",value.toString()))); - } - } - return value; - } - - public static Object validateString(Object value) { - if(value != null) { - if (!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format( - "ValueError: \'%s\' is not a string",value.toString()))); - } - } - return value; - } - - public static Object validateList(Object value) { - if(value != null) { - if (!(value instanceof ArrayList)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format( - "ValueError: \"%s\" is not a list",value.toString()))); - } - } - return value; - } - - - @SuppressWarnings("unchecked") - public static Object validateRange(Object range) { - // list class check - validateList(range); - // validate range list has a min and max - if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... - return range; - } - // validate min and max are numerics or the keyword UNBOUNDED - boolean minTest = false; - boolean maxTest = false; - Object r0 = ((ArrayList)range).get(0); - Object r1 = ((ArrayList)range).get(1); - - if(!(r0 instanceof Integer) && !(r0 instanceof Float) || - !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... - return range; - } - - Float min = 0.0F; - Float max = 0.0F; - if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { - minTest = true; - } - else { - min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; - } - if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { - maxTest = true; - } - else { - max = r1 instanceof Integer ? 
((Integer)r1).floatValue() : (Float)r1; - } - - // validate the max > min (account for UNBOUNDED) - if(!minTest && !maxTest) { - // Note: min == max is allowed - if(min > max) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format( - "ValueError:\"%s\" is not a valid range",range.toString()))); - } - } - return range; - } - - @SuppressWarnings("unchecked") - public static Object validateValueInRange(Object value,Object range,String propName) { - // verify all 3 are numeric and convert to Floats - if(!(value instanceof Integer || value instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format( - "ValueError: validateInRange: \"%s\" is not a number",range.toString()))); + + private static final String RANGE_UNBOUNDED = "UNBOUNDED"; + + private ValidateUtils() { + } + + public static Object strToNum(Object value) { + // Convert a string representation of a number into a numeric type + // TODO(TBD) we should not allow numeric values in, input should be str + if (value instanceof Number) { + return value; + } + try { + return Integer.parseInt((String) value); + } catch (NumberFormatException e) { + } + try { + return Float.parseFloat((String) value); + } catch (Exception e) { + } + return null; + } + + public static Object validateNumeric(Object value) { + if (value != null) { + if (!(value instanceof Number)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format( + "ValueError: \"%s\" is not a numeric", value.toString()))); + } + } + return value; + } + + public static Object validateInteger(Object value) { + if (value != null) { + if (!(value instanceof Integer)) { + // allow "true" and "false" + if (value instanceof Boolean) { + return (Boolean) value ? 1 : 0; + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format( + "ValueError: \"%s\" is not an integer", value.toString()))); + } + } + return value; + } + + public static Object validateFloat(Object value) { + if (value != null) { + if (!(value instanceof Float || value instanceof Double)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format( + "ValueError: \"%s\" is not a float", value.toString()))); + } + } + return value; + } + + public static Object validateString(Object value) { + if (value != null) { + if (!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format( + "ValueError: \'%s\' is not a string", value.toString()))); + } + } + return value; + } + + public static Object validateList(Object value) { + if (value != null) { + if (!(value instanceof ArrayList)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format( + "ValueError: \"%s\" is not a list", value.toString()))); + } + } + return value; + } + + + @SuppressWarnings("unchecked") + public static Object validateRange(Object range) { + // list class check + validateList(range); + // validate range list has a min and max + if (range instanceof ArrayList && ((ArrayList) range).size() != 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... 
+ return range; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList) range).get(0); + Object r1 = ((ArrayList) range).get(1); + + if (!(r0 instanceof Integer) && !(r0 instanceof Float) + || !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return range; + } + + Float min = 0.0F; + Float max = 0.0F; + if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } else { + min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0; + } + if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } else { + max = r1 instanceof Integer ? ((Integer) r1).floatValue() : (Float) r1; + } + + // validate the max > min (account for UNBOUNDED) + if (!minTest && !maxTest) { + // Note: min == max is allowed + if (min > max) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format( + "ValueError:\"%s\" is not a valid range", range.toString()))); + } + } + return range; + } + + @SuppressWarnings("unchecked") + public static Object validateValueInRange(Object value, Object range, String propName) { + // verify all 3 are numeric and convert to Floats + if (!(value instanceof Integer || value instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format( + "ValueError: validateInRange: \"%s\" is not a number", range.toString()))); return value; - } - Float fval = value instanceof Integer ? ((Integer)value).floatValue() : (Float)value; - - ////////////////////////// - //"validateRange(range);" - ////////////////////////// - // better safe than sorry... - // validate that range list has a min and max - if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... - return value; - } - // validate min and max are numerics or the keyword UNBOUNDED - boolean minTest = false; - boolean maxTest = false; - Object r0 = ((ArrayList)range).get(0); - Object r1 = ((ArrayList)range).get(1); - - if(!(r0 instanceof Integer) && !(r0 instanceof Float) || - !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... - return value; - } - - Float min = 0.0F; - Float max = 0.0F; - if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { - minTest = true; - } - else { - min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; - } - if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { - maxTest = true; - } - else { - max = r1 instanceof Integer ? 
((Integer)r1).floatValue() : (Float)r1; - } - - // validate the max > min (account for UNBOUNDED) - if(!minTest && !maxTest) { - // Note: min == max is allowed - if(min > max) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format( - "ValueError:\"%s\" is not a valid range",range.toString()))); - } - } - // finally... - boolean bError = false; - //Note: value is valid if equal to min - if(!minTest) { - if(fval < min) { - bError = true; - } - } - // Note: value is valid if equal to max - if(!maxTest) { - if(fval > max) { - bError = true; - } - } - if(bError) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format( - "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", - propName,value.toString(),r0.toString(),r1.toString()))); - } - return value; - } - - public static Object validateMap(Object ob) { - if(ob != null) { - if (!(ob instanceof LinkedHashMap)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format( - "ValueError\"%s\" is not a map.",ob.toString()))); - } - } - return ob; - } - - public static Object validateBoolean(Object value) { - if(value != null) { - if (value instanceof Boolean) { - return value; - } - if (value instanceof String) { - String normalized = ((String) value).toLowerCase(); - if (normalized.equals("true") || normalized.equals("false")) { - return normalized.equals("true"); - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format( - "ValueError: \"%s\" is not a boolean",value.toString()))); - } - return value; - } - - public static Object validateTimestamp(Object value) { - /* + } + Float fval = value instanceof Integer ? ((Integer) value).floatValue() : (Float) value; + + ////////////////////////// + //"validateRange(range);" + ////////////////////////// + // better safe than sorry... + // validate that range list has a min and max + if (range instanceof ArrayList && ((ArrayList) range).size() != 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return value; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList) range).get(0); + Object r1 = ((ArrayList) range).get(1); + + if (!(r0 instanceof Integer) && !(r0 instanceof Float) + || !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return value; + } + + Float min = 0.0F; + Float max = 0.0F; + if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } else { + min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0; + } + if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } else { + max = r1 instanceof Integer ? 
((Integer) r1).floatValue() : (Float) r1; + } + + // validate the max > min (account for UNBOUNDED) + if (!minTest && !maxTest) { + // Note: min == max is allowed + if (min > max) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format( + "ValueError:\"%s\" is not a valid range", range.toString()))); + } + } + // finally... + boolean bError = false; + //Note: value is valid if equal to min + if (!minTest) { + if (fval < min) { + bError = true; + } + } + // Note: value is valid if equal to max + if (!maxTest) { + if (fval > max) { + bError = true; + } + } + if (bError) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format( + "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", + propName, value.toString(), r0.toString(), r1.toString()))); + } + return value; + } + + public static Object validateMap(Object ob) { + if (ob != null) { + if (!(ob instanceof LinkedHashMap)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format( + "ValueError\"%s\" is not a map.", ob.toString()))); + } + } + return ob; + } + + public static Object validateBoolean(Object value) { + if (value != null) { + if (value instanceof Boolean) { + return value; + } + if (value instanceof String) { + String normalized = ((String) value).toLowerCase(); + if (normalized.equals("true") || normalized.equals("false")) { + return normalized.equals("true"); + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format( + "ValueError: \"%s\" is not a boolean", value.toString()))); + } + return value; + } + + public static Object validateTimestamp(Object value) { + + /* try: # Note: we must return our own exception message # as dateutil's parser returns different types / values on @@ -280,19 +275,18 @@ public class ValidateUtils { ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') % {'val': value, 'msg': original_err_msg})) */ - - // timestamps are loaded as Date objects by the YAML parser - if(value != null) { - if (!(value instanceof Date)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format( - "ValueError: \"%s\" is not a valid timestamp", - value.toString()))); - - } - } - return value; - } - + // timestamps are loaded as Date objects by the YAML parser + if (value != null) { + if (!(value instanceof Date)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format( + "ValueError: \"%s\" is not a valid timestamp", + value.toString()))); + + } + } + return value; + } + } /*python diff --git a/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java b/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java index 3902219..140a6e9 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
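The range handling above treats the keyword UNBOUNDED as an open end and accepts a value that is equal to either bound. A minimal standalone sketch of that rule, kept independent of the parser's collector plumbing; the class and method names below are illustrative only and are not part of jtosca:

    import java.util.Arrays;
    import java.util.List;

    public class RangeCheckSketch {

        private static final String RANGE_UNBOUNDED = "UNBOUNDED";

        // True when value lies inside [min, max]; either end may be the keyword
        // UNBOUNDED, which removes that bound (mirrors validateValueInRange above).
        static boolean inRange(float value, Object min, Object max) {
            boolean ok = true;
            if (!RANGE_UNBOUNDED.equals(min) && value < ((Number) min).floatValue()) {
                ok = false; // below the lower bound
            }
            if (!RANGE_UNBOUNDED.equals(max) && value > ((Number) max).floatValue()) {
                ok = false; // above the upper bound
            }
            return ok;      // a value equal to a bound is accepted
        }

        public static void main(String[] args) {
            List<Object> range = Arrays.asList(2, "UNBOUNDED");
            System.out.println(inRange(2.0f, range.get(0), range.get(1))); // true, equal to min
            System.out.println(inRange(1.5f, range.get(0), range.get(1))); // false, below min
            System.out.println(inRange(9e9f, range.get(0), range.get(1))); // true, max is UNBOUNDED
        }
    }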
@@ -29,6 +29,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Scanner; + //Generate excel file, include all validation issues errors in jtosca //the error java code, the line number and file name for each error. public class GetValidationIssues { @@ -37,13 +38,13 @@ public class GetValidationIssues { public static List data = new ArrayList<>(); public static void main(String[] args) { - System.out.println("GetAllValidationIssues - path to project files Directory is " + Arrays.toString(args)); - File jtoscaFiles = new File(args[0]+ "\\jtosca\\src\\main\\java\\org\\onap\\sdc\\toscaparser\\api"); + System.out.println("GetAllValidationIssues - path to project files Directory is " + Arrays.toString(args)); + File jtoscaFiles = new File(args[0] + "\\jtosca\\src\\main\\java\\org\\onap\\sdc\\toscaparser\\api"); try { printFiles(jtoscaFiles); - fileWriter = new CSVWriter(new FileWriter(args[1]+"\\JToscaValidationIssues_"+System.currentTimeMillis()+".csv"), '\t'); - fileWriter.writeNext(new String[] {"Error Message", "Class Name", "Line No."}, false); + fileWriter = new CSVWriter(new FileWriter(args[1] + "\\JToscaValidationIssues_" + System.currentTimeMillis() + ".csv"), '\t'); + fileWriter.writeNext(new String[]{"Error Message", "Class Name", "Line No."}, false); fileWriter.writeAll(data, false); } catch (IOException e) { e.printStackTrace(); diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java index 13e17ce..5876ac7 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -5,9 +5,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
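The JToscaImportTest cases that follow assert on specific JE error codes by filtering the thread-local collector's report after parsing. A condensed sketch of that pattern, assuming the report entries are plain strings (as the contains() filters in these tests imply); the CSAR path is a placeholder:

    import java.io.File;
    import java.util.List;
    import java.util.stream.Collectors;

    import org.onap.sdc.toscaparser.api.ToscaTemplate;
    import org.onap.sdc.toscaparser.api.common.JToscaException;
    import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;

    public class ErrorCodeFilterSketch {

        public static void main(String[] args) throws JToscaException {
            // Placeholder path to any CSAR on disk.
            File csar = new File("my-service.csar");

            // Parsing populates the thread-local validation issue collector.
            new ToscaTemplate(csar.getAbsolutePath(), null, true, null);

            // Each report entry carries its JE error code in the message text, so a
            // test can count one class of issue with a simple contains() filter.
            List<String> missingTypeErrors = ThreadLocalsHolder.getCollector()
                    .getValidationIssueReport().stream()
                    .filter(s -> s.contains("JE136"))
                    .collect(Collectors.toList());

            System.out.println("JE136 issues: " + missingTypeErrors.size());
        }
    }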
@@ -39,81 +39,81 @@ import static org.junit.Assert.*; public class JToscaImportTest { - @Test - public void testNoMissingTypeValidationError() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") - .getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List missingTypeErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() - .filter(s -> s.contains("JE136")).collect(Collectors.toList()); - assertEquals(0, missingTypeErrors.size()); - } - - @Test - public void testNoStackOverFlowError() { - Exception jte = null; - try { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") - .getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } catch (Exception e) { - jte = e; - } - assertEquals(null, jte); - } - - @Test - public void testNoInvalidImports() throws JToscaException { - List fileNames = new ArrayList<>(); - fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); - fileNames.add("csars/sdc-onboarding_csar.csar"); - fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); - - for (String fileName : fileNames) { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List invalidImportErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() - .filter(s -> s.contains("JE195")).collect(Collectors.toList()); - assertEquals(0, invalidImportErrors.size()); - } - } - - @Test - public void testParseAnnotations() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - - List inputs = toscaTemplate.getInputs(); - assertNotNull(inputs); - assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - - inputs.forEach(Input::parseAnnotations); - assertTrue(!inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - } - - @Test - public void testGetInputsWithAndWithoutAnnotations() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List inputs = toscaTemplate.getInputs(); - assertNotNull(inputs); - assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - - inputs = toscaTemplate.getInputs(true); - assertNotNull(inputs); - validateInputsAnnotations(inputs); - - inputs = toscaTemplate.getInputs(false); - assertNotNull(inputs); - assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - } + @Test + public void testNoMissingTypeValidationError() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") + .getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List missingTypeErrors = 
ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() + .filter(s -> s.contains("JE136")).collect(Collectors.toList()); + assertEquals(0, missingTypeErrors.size()); + } + + @Test + public void testNoStackOverFlowError() { + Exception jte = null; + try { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") + .getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (Exception e) { + jte = e; + } + assertEquals(null, jte); + } + + @Test + public void testNoInvalidImports() throws JToscaException { + List fileNames = new ArrayList<>(); + fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); + fileNames.add("csars/sdc-onboarding_csar.csar"); + fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); + + for (String fileName : fileNames) { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List invalidImportErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() + .filter(s -> s.contains("JE195")).collect(Collectors.toList()); + assertEquals(0, invalidImportErrors.size()); + } + } + + @Test + public void testParseAnnotations() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + + inputs.forEach(Input::parseAnnotations); + assertTrue(!inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + } + + @Test + public void testGetInputsWithAndWithoutAnnotations() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + + inputs = toscaTemplate.getInputs(true); + assertNotNull(inputs); + validateInputsAnnotations(inputs); + + inputs = toscaTemplate.getInputs(false); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + } @Test public void testGetPropertyNameTest() throws JToscaException { @@ -123,7 +123,7 @@ public class JToscaImportTest { ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); - ArrayList valueList = (ArrayList)nodeTemplate.getPropertyValueFromTemplatesByName("vmxvpfe_sriov41_0_port_vlanfilter"); + ArrayList valueList = (ArrayList) nodeTemplate.getPropertyValueFromTemplatesByName("vmxvpfe_sriov41_0_port_vlanfilter"); assertEquals(4, valueList.size()); assertEquals("vPE", (String) nodeTemplate.getPropertyValueFromTemplatesByName("nf_role")); @@ -131,24 +131,24 @@ public class JToscaImportTest { assertNull(nodeTemplate.getPropertyValueFromTemplatesByName("test")); } - @Test - 
public void testGetParentNodeTemplateTest() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); - //parent of this VF is service (null) - assertNull(nodeTemplate.getParentNodeTemplate()); - List children = nodeTemplate.getSubMappingToscaTemplate().getNodeTemplates(); - assertFalse(children.isEmpty()); - NodeTemplate cVFC = children.get(4); - //parent is the VF above - assertEquals("2017-488_ADIOD-vPE 0", cVFC.getParentNodeTemplate().getName()); - List children1 = cVFC.getSubMappingToscaTemplate().getNodeTemplates(); - assertFalse(children1.isEmpty()); - //parent is the CVFC above - assertEquals(cVFC, children1.get(0).getParentNodeTemplate()); + @Test + public void testGetParentNodeTemplateTest() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); + //parent of this VF is service (null) + assertNull(nodeTemplate.getParentNodeTemplate()); + List children = nodeTemplate.getSubMappingToscaTemplate().getNodeTemplates(); + assertFalse(children.isEmpty()); + NodeTemplate cVFC = children.get(4); + //parent is the VF above + assertEquals("2017-488_ADIOD-vPE 0", cVFC.getParentNodeTemplate().getName()); + List children1 = cVFC.getSubMappingToscaTemplate().getNodeTemplates(); + assertFalse(children1.isEmpty()); + //parent is the CVFC above + assertEquals(cVFC, children1.get(0).getParentNodeTemplate()); /* @@ -161,149 +161,149 @@ public class JToscaImportTest { policies = tt.getPolicies(); */ - } - - @Test - public void testNullValueHasNoNullPointerException() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-JennyVtsbcKarunaSvc-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List inputs = toscaTemplate.getInputs(); - assertNotNull(inputs); - } - - @Test - public void testGetPolicyMetadata() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - ArrayList policies = toscaTemplate.getPolicies(); - assertNotNull(policies); - assertEquals(1, policies.size()); - assertEquals("org.openecomp.policies.External", policies.get(0).getType()); - assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaData().getOrDefault("UUID", "").toString()); - assertTrue(policies.get(0).getMetaData().getOrDefault("UUID_test", "").toString().isEmpty()); - } - - @Test - public void testGetPolicyMetadataObj() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - ArrayList policies = 
toscaTemplate.getPolicies(); - assertNotNull(policies); - assertEquals(1, policies.size()); - assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("UUID", "").toString()); - assertTrue(policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("name_test", "").toString().isEmpty()); - } + } + + @Test + public void testNullValueHasNoNullPointerException() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-JennyVtsbcKarunaSvc-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + } + + @Test + public void testGetPolicyMetadata() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ArrayList policies = toscaTemplate.getPolicies(); + assertNotNull(policies); + assertEquals(1, policies.size()); + assertEquals("org.openecomp.policies.External", policies.get(0).getType()); + assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaData().getOrDefault("UUID", "").toString()); + assertTrue(policies.get(0).getMetaData().getOrDefault("UUID_test", "").toString().isEmpty()); + } + + @Test + public void testGetPolicyMetadataObj() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ArrayList policies = toscaTemplate.getPolicies(); + assertNotNull(policies); + assertEquals(1, policies.size()); + assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("UUID", "").toString()); + assertTrue(policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("name_test", "").toString().isEmpty()); + } private void validateInputsAnnotations(List inputs) { - List inputsWithAnnotations = inputs.stream().filter(i -> i.getAnnotations() != null) - .collect(Collectors.toList()); - assertTrue(!inputs.isEmpty()); - inputsWithAnnotations.stream().forEach(i -> validateAnnotations(i)); - } - - private void validateAnnotations(Input input) { - assertNotNull(input.getAnnotations()); - assertEquals(input.getAnnotations().size(), 1); - Annotation annotation = input.getAnnotations().get("source"); - assertEquals(annotation.getName(), "source"); - assertEquals(annotation.getType().toLowerCase(), "org.openecomp.annotations.source"); - assertNotNull(annotation.getProperties()); - Optional source_type = annotation.getProperties().stream() - .filter(p -> p.getName().equals("source_type")).findFirst(); - assertTrue(source_type.isPresent()); - assertEquals(source_type.get().getValue(), "HEAT"); - } - - private static final String TEST_DATATYPE_FILENAME ="csars/dataTypes-test-service.csar"; - private static final String TEST_DATATYPE_TEST1 = "TestType1"; - private static final String TEST_DATATYPE_TEST2 = "TestType2"; - private static final String TEST_DATATYPE_PROPERTY_STR = "strdata"; - private static final String TEST_DATATYPE_PROPERTY_INT = "intdata"; - private static final String 
TEST_DATATYPE_PROPERTY_LIST = "listdata"; - private static final String TEST_DATATYPE_PROPERTY_TYPE = "type"; - private static final String TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA = "entry_schema"; - private static final String TEST_DATATYPE_TOSTRING = "data_types="; - - @Test - public void testGetDataType() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - HashSet dataTypes = toscaTemplate.getDataTypes(); - assertThat(dataTypes,notNullValue()); - assertThat(dataTypes.size(),is(2)); - - for(DataType dataType: dataTypes){ - LinkedHashMap properties; - PropertyDef property; - if(dataType.getType().equals(TEST_DATATYPE_TEST1)){ - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_STR); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_STR)); - assertThat( property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.STRING)); - } - if(dataType.getType().equals(TEST_DATATYPE_TEST2)){ - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_INT); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_INT)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.INTEGER)); - - property = properties.get(TEST_DATATYPE_PROPERTY_LIST); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA),is(TEST_DATATYPE_TEST1)); - - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1),notNullValue()); - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2),notNullValue()); - assertThat(toscaTemplate.toString(),containsString(TEST_DATATYPE_TOSTRING)); - } - } - - } - - @Test - public void testGetInputValidate() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - HashSet dataTypes = toscaTemplate.getDataTypes(); - assertThat(dataTypes,notNullValue()); - assertThat(dataTypes.size(),is(2)); - - for(DataType dataType: dataTypes) { - LinkedHashMap properties; - PropertyDef property; - if(dataType.getType().equals(TEST_DATATYPE_TEST1)) { - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_STR); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_STR)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.STRING)); - } - if(dataType.getType().equals(TEST_DATATYPE_TEST2)) { - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_INT); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_INT)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.INTEGER)); - - property = properties.get(TEST_DATATYPE_PROPERTY_LIST); - assertThat(property,notNullValue()); - 
assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA),is(TEST_DATATYPE_TEST1)); - - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1),notNullValue()); - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2),notNullValue()); - assertThat(toscaTemplate.toString(),containsString(TEST_DATATYPE_TOSTRING)); - } - } - } + List inputsWithAnnotations = inputs.stream().filter(i -> i.getAnnotations() != null) + .collect(Collectors.toList()); + assertTrue(!inputs.isEmpty()); + inputsWithAnnotations.stream().forEach(i -> validateAnnotations(i)); + } + + private void validateAnnotations(Input input) { + assertNotNull(input.getAnnotations()); + assertEquals(input.getAnnotations().size(), 1); + Annotation annotation = input.getAnnotations().get("source"); + assertEquals(annotation.getName(), "source"); + assertEquals(annotation.getType().toLowerCase(), "org.openecomp.annotations.source"); + assertNotNull(annotation.getProperties()); + Optional source_type = annotation.getProperties().stream() + .filter(p -> p.getName().equals("source_type")).findFirst(); + assertTrue(source_type.isPresent()); + assertEquals(source_type.get().getValue(), "HEAT"); + } + + private static final String TEST_DATATYPE_FILENAME = "csars/dataTypes-test-service.csar"; + private static final String TEST_DATATYPE_TEST1 = "TestType1"; + private static final String TEST_DATATYPE_TEST2 = "TestType2"; + private static final String TEST_DATATYPE_PROPERTY_STR = "strdata"; + private static final String TEST_DATATYPE_PROPERTY_INT = "intdata"; + private static final String TEST_DATATYPE_PROPERTY_LIST = "listdata"; + private static final String TEST_DATATYPE_PROPERTY_TYPE = "type"; + private static final String TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA = "entry_schema"; + private static final String TEST_DATATYPE_TOSTRING = "data_types="; + + @Test + public void testGetDataType() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + HashSet dataTypes = toscaTemplate.getDataTypes(); + assertThat(dataTypes, notNullValue()); + assertThat(dataTypes.size(), is(2)); + + for (DataType dataType : dataTypes) { + LinkedHashMap properties; + PropertyDef property; + if (dataType.getType().equals(TEST_DATATYPE_TEST1)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_STR); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_STR)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.STRING)); + } + if (dataType.getType().equals(TEST_DATATYPE_TEST2)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_INT); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_INT)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.INTEGER)); + + property = properties.get(TEST_DATATYPE_PROPERTY_LIST); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), 
is(Schema.LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA), is(TEST_DATATYPE_TEST1)); + + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1), notNullValue()); + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2), notNullValue()); + assertThat(toscaTemplate.toString(), containsString(TEST_DATATYPE_TOSTRING)); + } + } + + } + + @Test + public void testGetInputValidate() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + HashSet dataTypes = toscaTemplate.getDataTypes(); + assertThat(dataTypes, notNullValue()); + assertThat(dataTypes.size(), is(2)); + + for (DataType dataType : dataTypes) { + LinkedHashMap properties; + PropertyDef property; + if (dataType.getType().equals(TEST_DATATYPE_TEST1)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_STR); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_STR)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.STRING)); + } + if (dataType.getType().equals(TEST_DATATYPE_TEST2)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_INT); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_INT)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.INTEGER)); + + property = properties.get(TEST_DATATYPE_PROPERTY_LIST); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA), is(TEST_DATATYPE_TEST1)); + + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1), notNullValue()); + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2), notNullValue()); + assertThat(toscaTemplate.toString(), containsString(TEST_DATATYPE_TOSTRING)); + } + } + } } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java index f8295d7..3f5290d 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
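The data-type assertions above walk each parsed DataType and read the schema map of its property definitions. A compact sketch of the same traversal; the generic type parameters do not survive in the listing above, so it is assumed here that getDataTypes() yields DataType elements and getAllProperties() a LinkedHashMap<String, PropertyDef>, and the CSAR path is a placeholder:

    import java.io.File;
    import java.util.LinkedHashMap;

    import org.onap.sdc.toscaparser.api.ToscaTemplate;
    import org.onap.sdc.toscaparser.api.common.JToscaException;
    import org.onap.sdc.toscaparser.api.elements.DataType;
    import org.onap.sdc.toscaparser.api.elements.PropertyDef;

    public class DataTypeDumpSketch {

        public static void main(String[] args) throws JToscaException {
            // Placeholder path to a CSAR that defines custom data types.
            ToscaTemplate template = new ToscaTemplate(
                    new File("dataTypes-test-service.csar").getAbsolutePath(), null, true, null);

            // Print every property of every custom data type with its declared schema type.
            for (DataType dataType : template.getDataTypes()) {
                LinkedHashMap<String, PropertyDef> properties = dataType.getAllProperties();
                for (PropertyDef property : properties.values()) {
                    System.out.println(dataType.getType() + "." + property.getName()
                            + " : " + property.getSchema().get("type"));
                }
            }
        }
    }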
@@ -52,30 +52,30 @@ public class JToscaMetadataParse { File file = new File(fileStr); ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - assertTrue(validationIssuesCaught == 0 ); + assertTrue(validationIssuesCaught == 0); } - + @Test public void testEmptyCsar() throws JToscaException { String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/emptyCsar.csar").getFile(); File file = new File(fileStr); try { - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); } catch (JToscaException e) { - assertTrue(e.getCode().equals(JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue())); - } + assertTrue(e.getCode().equals(JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue())); + } int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - assertTrue(validationIssuesCaught == 0 ); + assertTrue(validationIssuesCaught == 0); } - + @Test public void testEmptyPath() throws JToscaException { String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("").getFile(); File file = new File(fileStr); try { - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - }catch (JToscaException e) { - assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); - } + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (JToscaException e) { + assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); + } } } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java index eaf182e..fd84d6e 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java index 271eb59..d65de28 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
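The two tests above expect fatal parse failures to surface as a JToscaException whose code matches a JToscaErrorCodes value. A small sketch of handling those codes outside of a test; the archive path is a placeholder and the import locations follow the package layout used elsewhere in this patch:

    import java.io.File;

    import org.onap.sdc.toscaparser.api.ToscaTemplate;
    import org.onap.sdc.toscaparser.api.common.JToscaException;
    import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes;

    public class CsarErrorCodeSketch {

        public static void main(String[] args) {
            // Placeholder for an archive that cannot be parsed.
            File csar = new File("broken.csar");
            try {
                new ToscaTemplate(csar.getAbsolutePath(), null, true, null);
                System.out.println("parsed without a fatal error");
            } catch (JToscaException e) {
                // Fatal problems carry a string code that can be matched against
                // the JToscaErrorCodes enum values.
                if (JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue().equals(e.getCode())) {
                    System.out.println("not a valid CSAR archive");
                } else if (JToscaErrorCodes.PATH_NOT_VALID.getValue().equals(e.getCode())) {
                    System.out.println("path does not point to a readable file");
                } else {
                    System.out.println("other fatal error: " + e.getCode());
                }
            }
        }
    }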
@@ -31,45 +31,45 @@ import static org.junit.Assert.assertEquals; public class EntityTypeTest { - private static final Map origMap = EntityType.TOSCA_DEF; + private static final Map origMap = EntityType.TOSCA_DEF; - @Test - public void testUpdateDefinitions() throws Exception { + @Test + public void testUpdateDefinitions() throws Exception { - Map testData = new HashMap<>(); - testData.put("tosca.nodes.nfv.VNF", "{derived_from=tosca.nodes.Root, properties={id={type=string, description=ID of this VNF}, vendor={type=string, description=name of the vendor who generate this VNF}, version={type=version, description=version of the software for this VNF}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}]}"); - testData.put("tosca.nodes.nfv.VDU", "{derived_from=tosca.nodes.Compute, capabilities={high_availability={type=tosca.capabilities.nfv.HA}, virtualbinding={type=tosca.capabilities.nfv.VirtualBindable}, monitoring_parameter={type=tosca.capabilities.nfv.Metric}}, requirements=[{high_availability={capability=tosca.capabilities.nfv.HA, relationship=tosca.relationships.nfv.HA, node=tosca.nodes.nfv.VDU, occurrences=[0, 1]}}]}"); - testData.put("tosca.nodes.nfv.CP", "{derived_from=tosca.nodes.network.Port, properties={type={type=string, required=false}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}, {virtualBinding={capability=tosca.capabilities.nfv.VirtualBindable, relationship=tosca.relationships.nfv.VirtualBindsTo, node=tosca.nodes.nfv.VDU}}], attributes={address={type=string}}}"); - testData.put("tosca.nodes.nfv.VL", "{derived_from=tosca.nodes.network.Network, properties={vendor={type=string, required=true, description=name of the vendor who generate this VL}}, capabilities={virtual_linkable={type=tosca.capabilities.nfv.VirtualLinkable}}}"); - testData.put("tosca.nodes.nfv.VL.ELine", "{derived_from=tosca.nodes.nfv.VL, capabilities={virtual_linkable={occurrences=2}}}"); - testData.put("tosca.nodes.nfv.VL.ELAN", "{derived_from=tosca.nodes.nfv.VL}"); - testData.put("tosca.nodes.nfv.VL.ETree", "{derived_from=tosca.nodes.nfv.VL}"); - testData.put("tosca.nodes.nfv.FP", "{derived_from=tosca.nodes.Root, properties={policy={type=string, required=false, description=name of the vendor who generate this VL}}, requirements=[{forwarder={capability=tosca.capabilities.nfv.Forwarder, relationship=tosca.relationships.nfv.ForwardsTo}}]}"); - testData.put("tosca.groups.nfv.VNFFG", "{derived_from=tosca.groups.Root, properties={vendor={type=string, required=true, description=name of the vendor who generate this VNFFG}, version={type=string, required=true, description=version of this VNFFG}, number_of_endpoints={type=integer, required=true, description=count of the external endpoints included in this VNFFG}, dependent_virtual_link={type=list, entry_schema={type=string}, required=true, description=Reference to a VLD used in this Forwarding Graph}, connection_point={type=list, entry_schema={type=string}, required=true, description=Reference to Connection Points forming the VNFFG}, constituent_vnfs={type=list, entry_schema={type=string}, required=true, description=Reference to a list of VNFD used in this VNF Forwarding Graph}}}"); - testData.put("tosca.relationships.nfv.VirtualLinksTo", "{derived_from=tosca.relationships.network.LinksTo, valid_target_types=[tosca.capabilities.nfv.VirtualLinkable]}"); - 
testData.put("tosca.relationships.nfv.VirtualBindsTo", "{derived_from=tosca.relationships.network.BindsTo, valid_target_types=[tosca.capabilities.nfv.VirtualBindable]}"); - testData.put("tosca.relationships.nfv.HA", "{derived_from=tosca.relationships.Root, valid_target_types=[tosca.capabilities.nfv.HA]}"); - testData.put("tosca.relationships.nfv.Monitor", "{derived_from=tosca.relationships.ConnectsTo, valid_target_types=[tosca.capabilities.nfv.Metric]}"); - testData.put("tosca.relationships.nfv.ForwardsTo", "{derived_from=tosca.relationships.root, valid_target_types=[tosca.capabilities.nfv.Forwarder]}"); - testData.put("tosca.capabilities.nfv.VirtualLinkable", "{derived_from=tosca.capabilities.network.Linkable}"); - testData.put("tosca.capabilities.nfv.VirtualBindable", "{derived_from=tosca.capabilities.network.Bindable}"); - testData.put("tosca.capabilities.nfv.HA", "{derived_from=tosca.capabilities.Root, valid_source_types=[tosca.nodes.nfv.VDU]}"); - testData.put("tosca.capabilities.nfv.HA.ActiveActive", "{derived_from=tosca.capabilities.nfv.HA}"); - testData.put("tosca.capabilities.nfv.HA.ActivePassive", "{derived_from=tosca.capabilities.nfv.HA}"); - testData.put("tosca.capabilities.nfv.Metric", "{derived_from=tosca.capabilities.Root}"); - testData.put("tosca.capabilities.nfv.Forwarder", "{derived_from=tosca.capabilities.Root}"); + Map testData = new HashMap<>(); + testData.put("tosca.nodes.nfv.VNF", "{derived_from=tosca.nodes.Root, properties={id={type=string, description=ID of this VNF}, vendor={type=string, description=name of the vendor who generate this VNF}, version={type=version, description=version of the software for this VNF}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}]}"); + testData.put("tosca.nodes.nfv.VDU", "{derived_from=tosca.nodes.Compute, capabilities={high_availability={type=tosca.capabilities.nfv.HA}, virtualbinding={type=tosca.capabilities.nfv.VirtualBindable}, monitoring_parameter={type=tosca.capabilities.nfv.Metric}}, requirements=[{high_availability={capability=tosca.capabilities.nfv.HA, relationship=tosca.relationships.nfv.HA, node=tosca.nodes.nfv.VDU, occurrences=[0, 1]}}]}"); + testData.put("tosca.nodes.nfv.CP", "{derived_from=tosca.nodes.network.Port, properties={type={type=string, required=false}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}, {virtualBinding={capability=tosca.capabilities.nfv.VirtualBindable, relationship=tosca.relationships.nfv.VirtualBindsTo, node=tosca.nodes.nfv.VDU}}], attributes={address={type=string}}}"); + testData.put("tosca.nodes.nfv.VL", "{derived_from=tosca.nodes.network.Network, properties={vendor={type=string, required=true, description=name of the vendor who generate this VL}}, capabilities={virtual_linkable={type=tosca.capabilities.nfv.VirtualLinkable}}}"); + testData.put("tosca.nodes.nfv.VL.ELine", "{derived_from=tosca.nodes.nfv.VL, capabilities={virtual_linkable={occurrences=2}}}"); + testData.put("tosca.nodes.nfv.VL.ELAN", "{derived_from=tosca.nodes.nfv.VL}"); + testData.put("tosca.nodes.nfv.VL.ETree", "{derived_from=tosca.nodes.nfv.VL}"); + testData.put("tosca.nodes.nfv.FP", "{derived_from=tosca.nodes.Root, properties={policy={type=string, required=false, description=name of the vendor who generate this VL}}, requirements=[{forwarder={capability=tosca.capabilities.nfv.Forwarder, 
relationship=tosca.relationships.nfv.ForwardsTo}}]}"); + testData.put("tosca.groups.nfv.VNFFG", "{derived_from=tosca.groups.Root, properties={vendor={type=string, required=true, description=name of the vendor who generate this VNFFG}, version={type=string, required=true, description=version of this VNFFG}, number_of_endpoints={type=integer, required=true, description=count of the external endpoints included in this VNFFG}, dependent_virtual_link={type=list, entry_schema={type=string}, required=true, description=Reference to a VLD used in this Forwarding Graph}, connection_point={type=list, entry_schema={type=string}, required=true, description=Reference to Connection Points forming the VNFFG}, constituent_vnfs={type=list, entry_schema={type=string}, required=true, description=Reference to a list of VNFD used in this VNF Forwarding Graph}}}"); + testData.put("tosca.relationships.nfv.VirtualLinksTo", "{derived_from=tosca.relationships.network.LinksTo, valid_target_types=[tosca.capabilities.nfv.VirtualLinkable]}"); + testData.put("tosca.relationships.nfv.VirtualBindsTo", "{derived_from=tosca.relationships.network.BindsTo, valid_target_types=[tosca.capabilities.nfv.VirtualBindable]}"); + testData.put("tosca.relationships.nfv.HA", "{derived_from=tosca.relationships.Root, valid_target_types=[tosca.capabilities.nfv.HA]}"); + testData.put("tosca.relationships.nfv.Monitor", "{derived_from=tosca.relationships.ConnectsTo, valid_target_types=[tosca.capabilities.nfv.Metric]}"); + testData.put("tosca.relationships.nfv.ForwardsTo", "{derived_from=tosca.relationships.root, valid_target_types=[tosca.capabilities.nfv.Forwarder]}"); + testData.put("tosca.capabilities.nfv.VirtualLinkable", "{derived_from=tosca.capabilities.network.Linkable}"); + testData.put("tosca.capabilities.nfv.VirtualBindable", "{derived_from=tosca.capabilities.network.Bindable}"); + testData.put("tosca.capabilities.nfv.HA", "{derived_from=tosca.capabilities.Root, valid_source_types=[tosca.nodes.nfv.VDU]}"); + testData.put("tosca.capabilities.nfv.HA.ActiveActive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.HA.ActivePassive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.Metric", "{derived_from=tosca.capabilities.Root}"); + testData.put("tosca.capabilities.nfv.Forwarder", "{derived_from=tosca.capabilities.Root}"); - Map expectedDefMap = origMap; - expectedDefMap.putAll(testData); - EntityType.updateDefinitions("tosca_simple_profile_for_nfv_1_0_0"); + Map expectedDefMap = origMap; + expectedDefMap.putAll(testData); + EntityType.updateDefinitions("tosca_simple_profile_for_nfv_1_0_0"); - assertEquals(expectedDefMap, EntityType.TOSCA_DEF); + assertEquals(expectedDefMap, EntityType.TOSCA_DEF); - } + } - @After - public void tearDown() throws Exception { - EntityType.TOSCA_DEF = (LinkedHashMap) origMap; - } + @After + public void tearDown() throws Exception { + EntityType.TOSCA_DEF = (LinkedHashMap) origMap; + } } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java b/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java index 577fb17..98e5102 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java @@ -5,9 +5,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -41,10 +41,10 @@ public class GetInputTest { private static final String TEST_PROPERTY_LONGITUDE = "longitude"; private static final String TEST_DEFAULT_VALUE = "dsvpn-hub"; private static final String TEST_DESCRIPTION_VALUE = "This is used for SDWAN only"; - private static final String TEST_INPUT_TYPE="type"; - private static final String TEST_INPUT_SCHEMA_TYPE="tosca.datatypes.siteresource.site"; + private static final String TEST_INPUT_TYPE = "type"; + private static final String TEST_INPUT_SCHEMA_TYPE = "tosca.datatypes.siteresource.site"; private static final String TEST_TOSTRING = "get_input:[sites, 1, longitude]"; - private static final String TEST_INPUT_SITES= "sites"; + private static final String TEST_INPUT_SITES = "sites"; @Test public void validate() throws JToscaException { @@ -54,33 +54,33 @@ public class GetInputTest { NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getNodeTemplates().get(0); ArrayList inputs = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getInputs(); LinkedHashMap properties = nodeTemplate.getProperties(); - assertThat(properties,notNullValue()); - assertThat(properties.size(),is(14)); + assertThat(properties, notNullValue()); + assertThat(properties.size(), is(14)); Property property = properties.get(TEST_PROPERTY_ROLE); - assertThat(properties,notNullValue()); - assertThat(property.getName(),is(TEST_PROPERTY_ROLE)); - assertThat(property.getType(),is(Schema.STRING)); - assertThat(property.getDefault(),is(TEST_DEFAULT_VALUE)); - assertThat(property.getDescription(),is(TEST_DESCRIPTION_VALUE)); - GetInput getInput= (GetInput)property.getValue(); - assertThat(getInput.getEntrySchema().get(TEST_INPUT_TYPE).toString(),is(TEST_INPUT_SCHEMA_TYPE)); + assertThat(properties, notNullValue()); + assertThat(property.getName(), is(TEST_PROPERTY_ROLE)); + assertThat(property.getType(), is(Schema.STRING)); + assertThat(property.getDefault(), is(TEST_DEFAULT_VALUE)); + assertThat(property.getDescription(), is(TEST_DESCRIPTION_VALUE)); + GetInput getInput = (GetInput) property.getValue(); + assertThat(getInput.getEntrySchema().get(TEST_INPUT_TYPE).toString(), is(TEST_INPUT_SCHEMA_TYPE)); property = properties.get(TEST_PROPERTY_LONGITUDE); - assertThat(properties,notNullValue()); + assertThat(properties, notNullValue()); assertThat(property.getName(), is(TEST_PROPERTY_LONGITUDE)); - assertThat(property.getValue().toString(),is(TEST_TOSTRING)); - getInput= (GetInput)property.getValue(); + assertThat(property.getValue().toString(), is(TEST_TOSTRING)); + getInput = (GetInput) property.getValue(); ArrayList getInputArguments = getInput.getArguments(); - assertThat(getInputArguments.size(),is(3)); + assertThat(getInputArguments.size(), is(3)); assertThat(getInputArguments.get(0).toString(), is(TEST_INPUT_SITES)); assertThat(getInputArguments.get(1).toString(), is("1")); assertThat(getInputArguments.get(2).toString(), is(TEST_PROPERTY_LONGITUDE)); Input in = inputs.get(10); assertThat(in.getEntrySchema().get(TEST_INPUT_TYPE), is(TEST_INPUT_SCHEMA_TYPE)); - assertThat(in.getName(),is(TEST_INPUT_SITES)); - assertThat(in.getType(),is(Input.LIST)); + assertThat(in.getName(), is(TEST_INPUT_SITES)); 
+ assertThat(in.getType(), is(Input.LIST)); } @Test @@ -88,9 +88,9 @@ public class GetInputTest { //invalid file String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_FILENAME_NG).getFile(); File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null,false); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null, false); List issues = ThreadLocalsHolder.getCollector().getValidationIssueReport(); assertTrue(issues.stream().anyMatch(x -> x.contains("JE282"))); } - } +} -- 2.16.6