1 package org.onap.sdc.dcae.checker;
3 import java.lang.reflect.Method;
4 import java.lang.reflect.InvocationTargetException;
8 import java.io.IOException;
11 import java.net.URISyntaxException;
13 import java.util.HashMap;
14 import java.util.TreeMap;
15 import java.util.Iterator;
16 import java.util.ListIterator;
18 import java.util.List;
19 import java.util.LinkedList;
20 import java.util.ArrayList;
21 import java.util.Arrays;
23 import java.util.Collection;
24 import java.util.Collections;
25 import java.util.regex.Pattern;
26 import java.util.regex.Matcher;
27 import java.util.stream.Collectors;
29 import org.onap.sdc.common.onaplog.OnapLoggerDebug;
30 import org.onap.sdc.common.onaplog.OnapLoggerError;
31 import org.onap.sdc.common.onaplog.Enums.LogLevel;
32 import org.yaml.snakeyaml.Yaml;
34 import com.google.common.collect.Maps;
35 import com.google.common.collect.MapDifference;
36 import com.google.common.reflect.Invokable;
38 import com.google.common.collect.Table;
39 import com.google.common.collect.HashBasedTable;
41 import kwalify.Validator;
44 import kwalify.ValidationException;
45 import kwalify.SchemaException;
47 import org.apache.commons.jxpath.JXPathContext;
48 import org.apache.commons.jxpath.JXPathException;
49 import org.apache.commons.lang.reflect.ConstructorUtils;
50 import org.onap.sdc.dcae.checker.annotations.Catalogs;
51 import org.onap.sdc.dcae.checker.annotations.Checks;
52 import org.reflections.Reflections;
53 import org.reflections.util.FilterBuilder;
54 import org.reflections.util.ConfigurationBuilder;
55 import org.reflections.scanners.TypeAnnotationsScanner;
56 import org.reflections.scanners.SubTypesScanner;
57 import org.reflections.scanners.MethodAnnotationsScanner;
60 * To consider: model consistency checking happens now along with validation
61 * (is implemented as part of the validation hooks). It might be better to
62 * separate the 2 stages and perform all the consistency checking once
63 * validation is completed.
65 public class Checker {
// --- Keys of the standard TOSCA service-template sections and definition
// --- entries, named once here so the check_*/catalog_* handlers below
// --- stay consistent with each other.
66 private static final String PROPERTIES = "properties";
67 private static final String DEFAULT = "default";
68 private static final String ATTRIBUTES = "attributes";
69 private static final String DATA_TYPES = "data_types";
70 private static final String CAPABILITY_TYPES = "capability_types";
71 private static final String VALID_SOURCE_TYPES = "valid_source_types";
72 private static final String RELATIONSHIP_TYPES = "relationship_types";
73 private static final String INTERFACES = "interfaces";
74 private static final String VALID_TARGET_TYPES = "valid_target_types";
75 private static final String ARTIFACT_TYPES = "artifact_types";
76 private static final String INTERFACE_TYPES = "interface_types";
77 private static final String NODE_TYPES = "node_types";
78 private static final String REQUIREMENTS = "requirements";
79 private static final String CAPABILITIES = "capabilities";
80 private static final String GROUP_TYPES = "group_types";
81 private static final String TARGETS_CONSTANT = "targets";
82 private static final String POLICY_TYPES = "policy_types";
// Message fragments shared by several error reports.
83 private static final String IS_NONE_OF_THOSE = "' is none of those";
84 private static final String INPUTS = "inputs";
85 private static final String CAPABILITY = "capability";
86 private static final String ARTIFACTS = "artifacts";
87 private static final String WAS_DEFINED_FOR_THE_NODE_TYPE = " was defined for the node type ";
88 private static final String UNKNOWN = "Unknown ";
89 private static final String TYPE = " type ";
91 private Target target = null; //what we're validating at the moment
93 private Map<String, Target> grammars = new HashMap<>(); //grammars for the different tosca versions
// Catalog of everything seen so far; (re)assigned on each check/validate call.
95 private Catalog catalog;
96 private TargetLocator locator = new CommonLocator();
// Dispatch tables: template path -> annotated handler method -> handler instance.
// Populated by loadAnnotations() from @Checks / @Catalogs annotations.
98 private Table<String, Method, Object> checks = HashBasedTable.create();
99 private Table<String, Method, Object> catalogs = HashBasedTable.create();
101 private static OnapLoggerError errLogger = OnapLoggerError.getInstance();
102 private static OnapLoggerDebug debugLogger = OnapLoggerDebug.getInstance();
104 private static Catalog commonsCatalogInstance = null;
// Shared empty array for List.toArray calls (avoids per-call allocation).
106 private static final String[] EMPTY_STRING_ARRAY = new String[0];
108 /* Need a proper way to indicate where the grammars are and how they should be identified */
109 private static final String[] grammarFiles = new String[]{"tosca/tosca_simple_yaml_1_0.grammar",
110 "tosca/tosca_simple_yaml_1_1.grammar"};
// Patterns compiled once, per best practice (used by helpers outside this view).
112 private Pattern spacePattern = Pattern.compile("\\s");
114 private Pattern indexPattern = Pattern.compile("/\\p{Digit}+");
116 //this is getting silly ..
// Accepted parameter signatures for check hooks (map-shaped or list-shaped input).
117 private static Class[][] checkHookArgTypes =
119 new Class[]{Map.class, CheckContext.class},
120 new Class[]{List.class, CheckContext.class}};
// Parameter signature for kwalify validation hooks.
122 private static Class[] validationHookArgTypes =
123 new Class[]{Object.class, Rule.class, Validator.ValidationContext.class};
// Constructor: loads the TOSCA grammars and the annotated handlers
// (body partially elided in this view; see loadGrammars/loadAnnotations).
125 public Checker() throws CheckerException {
// CLI entry point: checks the file/directory given as first argument and
// dumps per-target reports. All output goes through errLogger.
130 public static void main(String[] theArgs) {
131 if (theArgs.length == 0) {
// usage message when no resource was given
132 errLogger.log(LogLevel.ERROR, Checker.class.getName(), "checker resource_to_validate [processor]*");
137 Catalog cat = Checker.check(new File(theArgs[0]));
139 for (Target t : cat.targets()) {
140 errLogger.log(LogLevel.ERROR, Checker.class.getName(), "{}\n{}\n{}", t.getLocation(), cat.importString(t), t.getReport());
143 for (Target t : cat.sortedTargets()) {
144 errLogger.log(LogLevel.ERROR, Checker.class.getName(), t.toString());
147 } catch (Exception x) {
148 errLogger.log(LogLevel.ERROR, Checker.class.getName(),"Exception {}", x);
// Resolves, parses and indexes the bundled grammar files by the
// tosca_definitions_version values they declare. Unusable grammars are
// logged at WARN and skipped rather than failing the whole load.
152 private void loadGrammars() throws CheckerException {
154 for (String grammarFile : grammarFiles) {
155 Target grammarTarget = this.locator.resolve(grammarFile);
156 if (grammarTarget == null) {
157 errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to locate grammar {}", grammarFile);
161 parseTarget(grammarTarget);
162 if (grammarTarget.getReport().hasErrors()) {
163 errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: {}", grammarFile, grammarTarget.getReport().toString());
// versions the grammar claims to support, read from its own
// tosca_definitions_version entry
167 List versions = null;
172 ((Map) grammarTarget.getTarget())
174 .get("tosca_definitions_version"))
176 } catch (Exception x) {
177 errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: cannot locate tosca_definitions_versions. Exception{}", grammarFile, x);
179 if (versions == null || versions.isEmpty()) {
180 errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invalid grammar {}: no tosca_definitions_versions specified", grammarFile);
// one grammar file may serve several version strings
184 for (Object version : versions) {
185 this.grammars.put(version.toString(), grammarTarget);
189 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Loaded grammars: {}", this.grammars);
// Scans org.onap.sdc.dcae for methods annotated @Checks / @Catalogs and
// registers them in the 'checks' / 'catalogs' dispatch tables, keyed by
// the template path the annotation declares. Handler instances are cached
// in 'handlers' so each declaring class is instantiated at most once;
// methods declared on Checker itself reuse 'this'.
192 private void loadAnnotations() {
193 Reflections reflections = new Reflections(
194 new ConfigurationBuilder()
195 .forPackages("org.onap.sdc.dcae")
196 .filterInputsBy(new FilterBuilder()
197 .include(".*\\.class")
199 .setScanners(new TypeAnnotationsScanner(),
200 new SubTypesScanner(),
201 new MethodAnnotationsScanner())
// super-type expansion is unnecessary for method-annotation lookup
202 .setExpandSuperTypes(false)
205 Map<Class, Object> handlers = new HashMap<>();
207 Set<Method> checkHandlers = reflections.getMethodsAnnotatedWith(Checks.class);
208 for (Method checkHandler : checkHandlers) {
209 checks.put(checkHandler.getAnnotation(Checks.class).path(),
211 handlers.computeIfAbsent(checkHandler.getDeclaringClass(),
// NOTE(review): Class.newInstance() is deprecated since Java 9;
// consider getDeclaredConstructor().newInstance() when modernizing.
214 return (getClass() == type) ? this
215 : type.newInstance();
216 } catch (Exception x) {
217 throw new RuntimeException(x);
222 Set<Method> catalogHandlers = reflections.getMethodsAnnotatedWith(Catalogs.class);
223 for (Method catalogHandler : catalogHandlers) {
224 catalogs.put(catalogHandler.getAnnotation(Catalogs.class).path(),
226 handlers.computeIfAbsent(catalogHandler.getDeclaringClass(),
229 return (getClass() == type) ? this
230 : type.newInstance();
231 } catch (Exception x) {
232 throw new RuntimeException(x);
// Replaces the default CommonLocator used to resolve targets.
239 public void setTargetLocator(TargetLocator theLocator) {
240 this.locator = theLocator;
// Targets seen by the last check; only valid after a check ran.
243 public Collection<Target> targets() {
244 if (this.catalog == null) {
245 throw new IllegalStateException("targets are only available after check");
248 return this.catalog.targets();
// Catalog built by the last check (body elided in this view).
251 public Catalog catalog() {
// Hands the current catalog to a post-processing stage.
255 public void process(Processor theProcessor) {
257 theProcessor.process(this.catalog);
260 /* a facility for handling all files in a target directory .. */
// Static convenience: builds a fresh Checker + Catalog (seeded with the
// commons catalog) and checks every file in a directory, or the single file.
261 public static Catalog check(File theSource)
262 throws CheckerException {
264 Catalog catalog = new Catalog(commonsCatalog());
265 Checker checker = new Checker();
267 if (theSource.isDirectory()) {
268 for (File f : theSource.listFiles()) {
270 checker.check(new Target(theSource.getCanonicalPath(), f.toURI().normalize()), catalog);
274 checker.check(new Target(theSource.getCanonicalPath(), theSource.toURI().normalize()), catalog);
276 } catch (IOException iox) {
277 throw new CheckerException("Failed to initialize target", iox);
// Check a resource by name against a fresh catalog.
283 public void check(String theSource)
284 throws CheckerException {
285 check(theSource, buildCatalog());
// Check a resource by name against the supplied catalog; the name is
// resolved through the configured TargetLocator.
288 public void check(String theSource, Catalog theCatalog)
289 throws CheckerException {
291 this.locator.resolve(theSource)
293 throw new CheckerException("Unable to locate the target " + theSource);
296 check(tgt, theCatalog);
299 public void check(Target theTarget) throws CheckerException {
300 check(theTarget, buildCatalog());
// Full pipeline for one target: parse -> (per parsed document) catalog,
// validate against the grammar, then run the semantic checks.
303 public void check(Target theTarget, Catalog theCatalog) throws CheckerException {
305 this.catalog = theCatalog;
306 this.locator.addSearchPath(theTarget.getLocation());
// addTarget returning true means this target was not processed before
308 if (this.catalog.addTarget(theTarget, null)) {
309 List<Target> targets = parseTarget(theTarget);
310 if (theTarget.getReport().hasErrors()) {
313 for (Target targetItr : targets) {
314 this.catalog.addTarget(targetItr, null);
// semantic checks only run on grammar-valid documents
315 if (!validateTarget(targetItr).getReport().hasErrors()) {
316 checkTarget(targetItr);
// Validate+check a pre-parsed target (no parse step) against a fresh catalog.
322 public void validate(Target theTarget) throws CheckerException {
323 validate(theTarget, buildCatalog());
326 public void validate(Target theTarget, Catalog theCatalog) throws CheckerException {
327 this.catalog = theCatalog;
328 this.locator.addSearchPath(theTarget.getLocation());
330 if (this.catalog.addTarget(theTarget, null)) {
331 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "@validateTarget");
332 if (!validateTarget(theTarget).getReport().hasErrors()) {
333 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "@checkTarget");
334 checkTarget(theTarget);
// Parses a target's YAML stream. A single-document stream is stored back
// into the given target; a multi-document stream yields one derived Target
// per document, addressed by a URI fragment carrying the document index.
339 private List<Target> parseTarget(final Target theTarget)
340 throws CheckerException {
341 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "parseTarget {}", theTarget);
343 Reader source = null;
345 source = theTarget.open();
346 } catch (IOException iox) {
347 throw new CheckerException("Failed to open target " + theTarget, iox);
351 ArrayList<Object> yamlRoots = new ArrayList<>();
353 Yaml yaml = new Yaml();
354 for (Object yamlRoot : yaml.loadAll(source)) {
355 yamlRoots.add(yamlRoot);
// YAML syntax errors abort the parse; the error is reported on the
// target's report (lines elided) and an empty list is returned
359 } catch (Exception x) {
361 return Collections.emptyList();
365 } catch (IOException iox) {
// NOTE(review): the class name is passed twice to the debug logger here
// (and in similar calls below) — looks like a copy/paste slip; verify
// against the OnapLoggerDebug.log signature.
366 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "Exception {}", iox);
370 ArrayList targets = new ArrayList(yamlRoots.size());
371 if (yamlRoots.size() == 1) {
372 //the target turned out to be a bare document
373 theTarget.setTarget(yamlRoots.get(0));
374 targets.add(theTarget);
376 //the target turned out to be a stream containing multiple documents
377 for (int i = 0; i < yamlRoots.size(); i++) {
379 !!We're changing the target below, i.e. we're changing the target implementation hence caching implementation will suffer!!
381 Target newTarget = new Target(theTarget.getName(),
382 fragmentTargetURI(theTarget.getLocation(), String.valueOf(i)));
383 newTarget.setTarget(yamlRoots.get(i));
384 targets.add(newTarget);
388 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), " exiting parseTarget {}", theTarget);
// Builds a URI identical to theRoot but carrying theFragment as its
// fragment part (used to address individual documents of a YAML stream).
392 private URI fragmentTargetURI(URI theRoot, String theFragment) {
394 return new URI(theRoot.getScheme(),
395 theRoot.getSchemeSpecificPart(),
// URISyntaxException is unexpected here since the parts come from an
// already-valid URI — surface it as unchecked
397 } catch (URISyntaxException urisx) {
398 throw new RuntimeException(urisx);
// Grammar (kwalify) validation of a parsed target: picks the grammar that
// matches the document's tosca_definitions_version, runs the validator,
// folds the results into the target's report and, when clean, applies the
// canonical forms the validator collected.
402 private Target validateTarget(Target theTarget)
403 throws CheckerException {
404 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering validateTarget {}", theTarget);
406 String version = (String)
407 ((Map) theTarget.getTarget())
408 .get("tosca_definitions_version");
409 if (version == null) {
410 throw new CheckerException("Target " + theTarget + " does not specify a tosca_definitions_version");
413 Target grammar = this.grammars.get(version);
414 if (grammar == null) {
415 throw new CheckerException("Target " + theTarget + " specifies unknown tosca_definitions_version " + version);
418 TOSCAValidator validator = null;
420 validator = new TOSCAValidator(theTarget, grammar.getTarget());
421 } catch (SchemaException sx) {
422 throw new CheckerException("Grammar error at: " + sx.getPath(), sx);
425 theTarget.getReport().addAll(
426 validator.validate(theTarget.getTarget()));
428 if (!theTarget.getReport().hasErrors()) {
429 applyCanonicals(theTarget.getTarget(), validator.canonicals);
432 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), " exiting validateTarget {}", theTarget);
// Semantic checking of a grammar-valid target: walks the service template
// root map through the annotated check handlers.
436 private Target checkTarget(Target theTarget) {
438 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering checkTarget {}", theTarget);
440 CheckContext ctx = new CheckContext(theTarget);
442 checkServiceTemplateDefinition(
443 (Map<String, Object>) theTarget.getTarget(), ctx);
445 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "exiting checkTarget {}", theTarget);
// Checks a 'properties' definition map: generic definition checks first,
// then each individual property definition.
449 public void checkProperties(
450 Map<String, Map> theDefinitions, CheckContext theContext) {
451 theContext.enter(PROPERTIES);
453 if (!checkDefinition(PROPERTIES, theDefinitions, theContext)) {
457 for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
458 Map.Entry<String, Map> e = i.next();
459 checkPropertyDefinition(e.getKey(), e.getValue(), theContext);
// Checks one property definition: its data type must be known and any
// default value must be valid for that type.
466 private void checkPropertyDefinition(
467 String theName, Map theDefinition, CheckContext theContext) {
468 theContext.enter(theName);
469 if (!checkDefinition(theName, theDefinition, theContext)) {
473 if (!checkDataType(theDefinition, theContext)) {
476 //check default value is compatible with type
477 Object defaultValue = theDefinition.get(DEFAULT);
478 if (defaultValue != null) {
479 checkDataValuation(defaultValue, theDefinition, theContext);
// Checks an 'attributes' definition map, one attribute at a time.
485 private void checkAttributes(
486 Map<String, Map> theDefinitions, CheckContext theContext) {
487 theContext.enter(ATTRIBUTES);
489 if (!checkDefinition(ATTRIBUTES, theDefinitions, theContext)) {
493 for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
494 Map.Entry<String, Map> e = i.next();
495 checkAttributeDefinition(e.getKey(), e.getValue(), theContext);
// Checks one attribute definition: generic checks plus data-type lookup.
502 private void checkAttributeDefinition(
503 String theName, Map theDefinition, CheckContext theContext) {
504 theContext.enter(theName);
506 if (!checkDefinition(theName, theDefinition, theContext)) {
509 if (!checkDataType(theDefinition, theContext)) {
517 /* top level rule, we collected the whole information set.
518 * this is where checking starts
// Entry point of the semantic pass: first runs every @Catalogs handler
// over the top-level entries (so types are known for cross-referencing),
// then every @Checks handler.
520 private void checkServiceTemplateDefinition(
521 Map<String, Object> theDef, CheckContext theContext) {
522 theContext.enter("");
524 if (theDef == null) {
525 theContext.addError("Empty template", null);
529 //!!! imports need to be processed first now that catalogging takes place at check time!!
531 //first catalog whatever it is there to be cataloged so that the checks can perform cross-checking
532 for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator();
534 Map.Entry<String, Object> e = ri.next();
535 catalogs(e.getKey(), e.getValue(), theContext);
// second pass: the actual checks, dispatched by top-level key
538 for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator();
540 Map.Entry<String, Object> e = ri.next();
541 checks(e.getKey(), e.getValue(), theContext);
// Catalogs every data type so later checks can resolve references to them.
546 @Catalogs(path = "/data_types")
547 protected void catalog_data_types(
548 Map<String, Map> theDefinitions, CheckContext theContext) {
549 theContext.enter(DATA_TYPES)
551 catalogTypes(Construct.Data, theDefinitions, theContext);
// Checks every data type definition in the template.
557 @Checks(path = "/data_types")
558 protected void check_data_types(
559 Map<String, Map> theDefinitions, CheckContext theContext) {
560 theContext.enter(DATA_TYPES);
563 if (!checkDefinition(DATA_TYPES, theDefinitions, theContext)) {
567 for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) {
568 Map.Entry<String, Map> e = i.next();
569 checkDataTypeDefinition(e.getKey(), e.getValue(), theContext);
// Checks one data type: generic checks plus its 'properties' facet.
576 private void checkDataTypeDefinition(String theName,
578 CheckContext theContext) {
579 theContext.enter(theName, Construct.Data);
581 if (!checkDefinition(theName, theDefinition, theContext)) {
585 if (theDefinition.containsKey(PROPERTIES)) {
587 (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
588 checkTypeConstructFacet(Construct.Data, theName, theDefinition,
589 Facet.properties, theContext);
// Catalogs every capability type for later cross-referencing.
596 @Catalogs(path = "/capability_types")
597 protected void catalog_capability_types(
598 Map<String, Map> theDefinitions, CheckContext theContext) {
599 theContext.enter(CAPABILITY_TYPES);
601 catalogTypes(Construct.Capability, theDefinitions, theContext);
// Checks every capability type definition in the template.
608 @Checks(path = "/capability_types")
609 protected void check_capability_types(
610 Map<String, Map> theTypes, CheckContext theContext) {
611 theContext.enter(CAPABILITY_TYPES);
613 if (!checkDefinition(CAPABILITY_TYPES, theTypes, theContext)) {
617 for (Iterator<Map.Entry<String, Map>> i = theTypes.entrySet().iterator(); i.hasNext(); ) {
618 Map.Entry<String, Map> e = i.next();
619 checkCapabilityTypeDefinition(e.getKey(), e.getValue(), theContext);
// Checks one capability type: properties/attributes facets plus the
// valid_source_types node-type references.
626 private void checkCapabilityTypeDefinition(String theName,
628 CheckContext theContext) {
629 theContext.enter(theName, Construct.Capability);
632 if (!checkDefinition(theName, theDefinition, theContext)) {
636 if (theDefinition.containsKey(PROPERTIES)) {
638 (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
639 checkTypeConstructFacet(Construct.Capability, theName, theDefinition,
640 Facet.properties, theContext);
643 if (theDefinition.containsKey(ATTRIBUTES)) {
645 (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext);
646 checkTypeConstructFacet(Construct.Capability, theName, theDefinition,
647 Facet.attributes, theContext);
650 //valid_source_types: see capability_type_definition
651 //unclear: how is the valid_source_types list definition evolving across
652 //the type hierarchy: additive, overwriting, ??
653 if (theDefinition.containsKey(VALID_SOURCE_TYPES)) {
654 checkTypeReference(Construct.Node, theContext,
655 ((List<String>) theDefinition.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY));
// Catalogs every relationship type for later cross-referencing.
662 @Catalogs(path = "/relationship_types")
663 protected void catalog_relationship_types(
664 Map<String, Map> theDefinitions, CheckContext theContext) {
665 theContext.enter(RELATIONSHIP_TYPES);
667 catalogTypes(Construct.Relationship, theDefinitions, theContext);
// Checks every relationship type definition in the template.
674 @Checks(path = "/relationship_types")
675 protected void check_relationship_types(
676 Map<String, Map> theDefinition, CheckContext theContext) {
677 theContext.enter(RELATIONSHIP_TYPES);
679 if (!checkDefinition(RELATIONSHIP_TYPES, theDefinition, theContext)) {
683 for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
684 Map.Entry<String, Map> e = i.next();
685 checkRelationshipTypeDefinition(e.getKey(), e.getValue(), theContext);
// Checks one relationship type: properties/attributes facets, its
// interface definitions, and the valid_target_types capability references.
692 private void checkRelationshipTypeDefinition(String theName,
694 CheckContext theContext) {
695 theContext.enter(theName, Construct.Relationship);
697 if (!checkDefinition(theName, theDefinition, theContext)) {
701 if (theDefinition.containsKey(PROPERTIES)) {
703 (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
704 checkTypeConstructFacet(Construct.Relationship, theName, theDefinition,
705 Facet.properties, theContext);
708 if (theDefinition.containsKey(ATTRIBUTES)) {
710 (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext);
711 checkTypeConstructFacet(Construct.Relationship, theName, theDefinition,
712 Facet.attributes, theContext);
715 Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get(INTERFACES);
716 if (interfaces != null) {
717 theContext.enter(INTERFACES);
718 for (Iterator<Map.Entry<String, Map>> i =
719 interfaces.entrySet().iterator(); i.hasNext(); ) {
720 Map.Entry<String, Map> e = i.next();
721 check_type_interface_definition(
722 e.getKey(), e.getValue(), theContext);
// valid_target_types entries must name known capability types
727 if (theDefinition.containsKey(VALID_TARGET_TYPES)) {
728 checkTypeReference(Construct.Capability, theContext,
729 ((List<String>) theDefinition.get(VALID_TARGET_TYPES)).toArray(EMPTY_STRING_ARRAY));
// Catalogs every artifact type for later cross-referencing.
736 @Catalogs(path = "/artifact_types")
737 protected void catalog_artifact_types(
738 Map<String, Map> theDefinitions, CheckContext theContext) {
739 theContext.enter(ARTIFACT_TYPES);
741 catalogTypes(Construct.Artifact, theDefinitions, theContext);
// Checks every artifact type definition in the template.
748 @Checks(path = "/artifact_types")
749 protected void check_artifact_types(
750 Map<String, Map> theDefinition, CheckContext theContext) {
751 theContext.enter(ARTIFACT_TYPES);
753 if (!checkDefinition(ARTIFACT_TYPES, theDefinition, theContext)) {
757 for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
758 Map.Entry<String, Map> e = i.next();
759 checkArtifactTypeDefinition(e.getKey(), e.getValue(), theContext);
// Checks one artifact type: only the generic definition checks apply.
766 private void checkArtifactTypeDefinition(String theName,
768 CheckContext theContext) {
769 theContext.enter(theName, Construct.Artifact);
771 checkDefinition(theName, theDefinition, theContext);
// Catalogs every interface type for later cross-referencing.
777 @Catalogs(path = "/interface_types")
778 protected void catalog_interface_types(
779 Map<String, Map> theDefinitions, CheckContext theContext) {
780 theContext.enter(INTERFACE_TYPES);
782 catalogTypes(Construct.Interface, theDefinitions, theContext);
// Checks every interface type definition in the template.
788 @Checks(path = "/interface_types")
789 protected void check_interface_types(
790 Map<String, Map> theDefinition, CheckContext theContext) {
791 theContext.enter(INTERFACE_TYPES);
793 if (!checkDefinition(INTERFACE_TYPES, theDefinition, theContext)) {
797 for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
798 Map.Entry<String, Map> e = i.next();
799 checkInterfaceTypeDefinition(e.getKey(), e.getValue(), theContext);
// Checks one interface type: only the generic definition checks apply
// (interface types have no derived_from hierarchy — see the note below).
806 private void checkInterfaceTypeDefinition(String theName,
808 CheckContext theContext) {
809 theContext.enter(theName, Construct.Interface);
811 checkDefinition(theName, theDefinition, theContext);
// Catalogs every node type for later cross-referencing.
817 @Catalogs(path = "/node_types")
818 protected void catalog_node_types(
819 Map<String, Map> theDefinitions, CheckContext theContext) {
820 theContext.enter(NODE_TYPES);
822 catalogTypes(Construct.Node, theDefinitions, theContext);
// Checks every node type definition in the template.
829 @Checks(path = "/node_types")
830 protected void check_node_types(
831 Map<String, Map> theDefinition, CheckContext theContext) {
832 theContext.enter(NODE_TYPES);
834 if (!checkDefinition(NODE_TYPES, theDefinition, theContext)) {
838 for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
839 Map.Entry<String, Map> e = i.next();
840 checkNodeTypeDefinition(e.getKey(), e.getValue(), theContext);
// Checks one node type: properties/attributes facets, requirements,
// capabilities, and interface definitions.
847 private void checkNodeTypeDefinition(String theName,
849 CheckContext theContext) {
850 theContext.enter(theName, Construct.Node);
853 if (!checkDefinition(theName, theDefinition, theContext)) {
857 if (theDefinition.containsKey(PROPERTIES)) {
859 (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
860 checkTypeConstructFacet(Construct.Node, theName, theDefinition,
861 Facet.properties, theContext);
864 if (theDefinition.containsKey(ATTRIBUTES)) {
866 (Map<String, Map>) theDefinition.get(ATTRIBUTES), theContext);
867 checkTypeConstructFacet(Construct.Node, theName, theDefinition,
868 Facet.attributes, theContext);
872 if (theDefinition.containsKey(REQUIREMENTS)) {
874 (List<Map>) theDefinition.get(REQUIREMENTS), theContext);
878 if (theDefinition.containsKey(CAPABILITIES)) {
880 (Map<String, Map>) theDefinition.get(CAPABILITIES), theContext);
884 Map<String, Map> interfaces =
885 (Map<String, Map>) theDefinition.get(INTERFACES);
886 checkMapTypeInterfaceDefinition(theContext, interfaces);
// Shared helper (node and group types): checks each entry of an optional
// 'interfaces' map; a null map means the section was absent and is fine.
892 private void checkMapTypeInterfaceDefinition(CheckContext theContext, Map<String, Map> interfaces) {
893 if (interfaces != null) {
895 theContext.enter(INTERFACES);
896 for (Iterator<Map.Entry<String, Map>> i =
897 interfaces.entrySet().iterator(); i.hasNext(); ) {
898 Map.Entry<String, Map> e = i.next();
899 check_type_interface_definition(
900 e.getKey(), e.getValue(), theContext);
// Catalogs every group type for later cross-referencing.
908 @Catalogs(path = "/group_types")
909 protected void catalog_group_types(
910 Map<String, Map> theDefinitions, CheckContext theContext) {
911 theContext.enter(GROUP_TYPES);
913 catalogTypes(Construct.Group, theDefinitions, theContext);
// Checks every group type definition in the template.
919 @Checks(path = "/group_types")
920 protected void check_group_types(
921 Map<String, Map> theDefinition, CheckContext theContext) {
922 theContext.enter(GROUP_TYPES);
924 if (!checkDefinition(GROUP_TYPES, theDefinition, theContext)) {
928 for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
929 Map.Entry<String, Map> e = i.next();
930 checkGroupTypeDefinition(e.getKey(), e.getValue(), theContext);
// Checks one group type: properties facet, 'targets' node-type references,
// and interface definitions.
937 private void checkGroupTypeDefinition(String theName,
939 CheckContext theContext) {
940 theContext.enter(theName, Construct.Group);
943 if (!checkDefinition(theName, theDefinition, theContext)) {
947 if (theDefinition.containsKey(PROPERTIES)) {
949 (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
950 checkTypeConstructFacet(Construct.Group, theName, theDefinition,
951 Facet.properties, theContext);
// group targets must be known node types
954 if (theDefinition.containsKey(TARGETS_CONSTANT)) {
955 checkTypeReference(Construct.Node, theContext,
956 ((List<String>) theDefinition.get(TARGETS_CONSTANT)).toArray(EMPTY_STRING_ARRAY));
960 Map<String, Map> interfaces =
961 (Map<String, Map>) theDefinition.get(INTERFACES);
962 checkMapTypeInterfaceDefinition(theContext, interfaces);
// Catalogs every policy type for later cross-referencing.
969 @Catalogs(path = "/policy_types")
970 protected void catalog_policy_types(
971 Map<String, Map> theDefinitions, CheckContext theContext) {
972 theContext.enter(POLICY_TYPES);
974 catalogTypes(Construct.Policy, theDefinitions, theContext);
// Checks every policy type definition in the template.
981 @Checks(path = "/policy_types")
982 protected void check_policy_types(
983 Map<String, Map> theDefinition, CheckContext theContext) {
984 theContext.enter(POLICY_TYPES);
986 if (!checkDefinition(POLICY_TYPES, theDefinition, theContext)) {
990 for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
991 Map.Entry<String, Map> e = i.next();
992 checkPolicyTypeDefinition(e.getKey(), e.getValue(), theContext);
999 private void checkPolicyTypeDefinition(String theName,
1001 CheckContext theContext) {
1002 theContext.enter(theName, Construct.Policy);
1005 if (!checkDefinition(theName, theDefinition, theContext)) {
1009 if (theDefinition.containsKey(PROPERTIES)) {
1011 (Map<String, Map>) theDefinition.get(PROPERTIES), theContext);
1012 checkTypeConstructFacet(Construct.Policy, theName, theDefinition,
1013 Facet.properties, theContext);
1016 //the targets can be known node types or group types
1017 List<String> targets = (List<String>) theDefinition.get(TARGETS_CONSTANT);
1018 if ((targets != null) && (checkDefinition(TARGETS_CONSTANT, targets, theContext))) {
1019 for (String targetItr : targets) {
1020 if (!(this.catalog.hasType(Construct.Node, targetItr) ||
1021 this.catalog.hasType(Construct.Group, targetItr))) {
1022 theContext.addError("The 'targets' entry must contain a reference to a node type or group type, '" + target + IS_NONE_OF_THOSE, null);
1031 //checking of actual constructs (capability, ..)
1033 /* First, interface types do not have a hierarchical organization (no
1034 * 'derived_from' in a interface type definition).
1035 * So, when interfaces (with a certain type) are defined in a node
1036 * or relationship type (and they can define new? operations), what
1037 * is there to check:
1038 * Can operations here re-define their declaration from the interface
1039 * type spec?? From A.5.11.3 we are to understand indicates override to be
1040 * the default interpretation .. but they talk about sub-classing so it
1041 * probably intended as a reference to the node or relationship type
1042 * hierarchy and not the interface type (no hierarchy there).
1043 * Or is this a case of augmentation where new operations can be added??
// Checks an interface definition appearing inside a node or relationship
// type: the interface type must be known and any 'inputs' are checked.
1045 private void check_type_interface_definition(
1046 String theName, Map theDef, CheckContext theContext) {
1047 theContext.enter(theName);
1049 if (!checkDefinition(theName, theDef, theContext)) {
1053 if (!checkType(Construct.Interface, theDef, theContext)) {
1057 if (theDef.containsKey(INPUTS)) {
1058 check_inputs((Map<String, Map>) theDef.get(INPUTS), theContext);
// Checks a 'capabilities' map within a node type, one capability at a time.
1065 private void check_capabilities(Map<String, Map> theDefinition,
1066 CheckContext theContext) {
1067 theContext.enter(CAPABILITIES);
1069 if (!checkDefinition(CAPABILITIES, theDefinition, theContext)) {
1073 for (Iterator<Map.Entry<String, Map>> i = theDefinition.entrySet().iterator(); i.hasNext(); ) {
1074 Map.Entry<String, Map> e = i.next();
1075 checkCapabilityDefinition(e.getKey(), e.getValue(), theContext);
1082 /* A capability definition appears within the context of a node type */
// Checks one capability definition: known capability type, property and
// attribute facet augmentation, and valid_source_types node references.
1083 private void checkCapabilityDefinition(String theName,
1085 CheckContext theContext) {
1086 theContext.enter(theName, Construct.Capability);
1089 if (!checkDefinition(theName, theDef, theContext)) {
1093 //check capability type
1094 if (!checkType(Construct.Capability, theDef, theContext)) {
// a capability definition may add/override properties of its type
1099 if (!checkFacetAugmentation(
1100 Construct.Capability, theDef, Facet.properties, theContext)) {
1105 if (!checkFacetAugmentation(
1106 Construct.Capability, theDef, Facet.attributes, theContext)) {
1110 //valid_source_types: should point to valid template nodes
1111 if (theDef.containsKey(VALID_SOURCE_TYPES)) {
1112 checkTypeReference(Construct.Node, theContext,
1113 ((List<String>) theDef.get(VALID_SOURCE_TYPES)).toArray(EMPTY_STRING_ARRAY));
1114 //per A.6.1.4 there is an additional check to be performed here:
1115 //"Any Node Type (names) provides as values for the valid_source_types keyname SHALL be type-compatible (i.e., derived from the same parent Node Type) with any Node Types defined using the same keyname in the parent Capability Type."
1117 //occurrences: were verified in range_definition
// Checks a 'requirements' list within a node type. Each list element is a
// single-entry map (requirement name -> definition) per the TOSCA grammar;
// the assert documents that invariant.
1124 private void check_requirements(List<Map> theDefinition,
1125 CheckContext theContext) {
1126 theContext.enter(REQUIREMENTS);
1128 if (!checkDefinition(REQUIREMENTS, theDefinition, theContext)) {
1132 for (Iterator<Map> i = theDefinition.iterator(); i.hasNext(); ) {
1134 Iterator<Map.Entry<String, Map>> ei =
1135 (Iterator<Map.Entry<String, Map>>) e.entrySet().iterator();
1136 Map.Entry<String, Map> eie = ei.next();
1137 checkRequirementDefinition(eie.getKey(), eie.getValue(), theContext);
// grammar guarantees one entry per list element
1138 assert !ei.hasNext();
//validates one requirement definition from a node type: checks the
//referenced capability/node/relationship types exist, then cross-checks
//valid_source_types (of the target capability) against the enclosing node
//type, and valid_target_types (of the relationship type) against the
//capability type.
private void checkRequirementDefinition(String theName,
CheckContext theContext) {
theContext.enter(theName, Construct.Requirement);

if (!checkDefinition(theName, theDef, theContext)) {

//check capability type
String capabilityType = (String) theDef.get(CAPABILITY);
if (null != capabilityType) {
checkTypeReference(Construct.Capability, theContext, capabilityType);

String nodeType = (String) theDef.get("node");
if (null != nodeType) {
checkTypeReference(Construct.Node, theContext, nodeType);

//check relationship type
Map relationshipSpec = (Map) theDef.get("relationship");
String relationshipType = null;
if (null != relationshipSpec) {
relationshipType = (String) relationshipSpec.get("type");
if (relationshipType != null) { //should always be the case
checkTypeReference(Construct.Relationship, theContext, relationshipType);

Map<String, Map> interfaces = (Map<String, Map>)
relationshipSpec.get(INTERFACES);
if (interfaces != null) {
//augmentation (additional properties or operations) of the interfaces
//defined by the above relationship types

//check that the interface types are known
for (Map interfaceDef : interfaces.values()) {
checkType(Construct.Interface, interfaceDef, theContext);

//the capability definition might come from the capability type or from the capability definition
//within the node type. We might have more than one as a node might specify multiple capabilities of the
//the goal here is to cross check the compatibility of the valid_source_types specification in the
//target capability definition (if that definition contains a valid_source_types entry).
List<Map> capabilityDefs = new LinkedList<>();
//nodeType exposes capabilityType
if (nodeType != null) {
Map<String, Map> capabilities =
findTypeFacetByType(Construct.Node, nodeType,
Facet.capabilities, capabilityType);
if (capabilities.isEmpty()) {
theContext.addError("The node type " + nodeType + " does not appear to expose a capability of a type compatible with " + capabilityType, null);
for (Map.Entry<String, Map> capability : capabilities.entrySet()) {
//this is the capability as it was defined in the node type
Map capabilityDef = capability.getValue();
//if it defines a valid_source_types then we're working with it,
//otherwise we're working with the capability type it points to.
//The spec does not make it clear if the valid_source_types in a capability definition augments or
//overwrites the one from the capabilityType (it just says they must be compatible).
if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) {
capabilityDefs.add(capabilityDef);
catalog.getTypeDefinition(Construct.Capability, (String) capabilityDef.get("type"));
if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) {
capabilityDefs.add(capabilityDef);
//!!if there is a capability that does not have a valid_source_type than there is no reason to
//make any further verification (as there is a valid node_type/capability target for this requirement)
capabilityDefs.clear();
//no target node type: fall back to the capability type itself
Map capabilityDef = catalog.getTypeDefinition(Construct.Capability, capabilityType);
if (capabilityDef.containsKey(VALID_SOURCE_TYPES)) {
capabilityDefs.add(capabilityDef);
//check that the node type enclosing this requirement definition
//is in the list of valid_source_types
if (!capabilityDefs.isEmpty()) {
String enclosingNodeType =
theContext.enclosingConstruct(Construct.Node);
assert enclosingNodeType != null;

//the enclosing node type must derive from at least one declared source type
if (!capabilityDefs.stream().anyMatch(
(Map capabilityDef) -> {
List<String> valid_source_types =
(List<String>) capabilityDef.get(VALID_SOURCE_TYPES);
return valid_source_types.stream().anyMatch(
(String source_type) -> catalog.isDerivedFrom(
Construct.Node, enclosingNodeType, source_type));
theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types provided in the definition of compatible capabilities", null);

//if we have a relationship type, check if it has a valid_target_types
//if it does, make sure that the capability type is compatible with one
if (relationshipType != null) { //should always be the case
Map relationshipTypeDef = catalog.getTypeDefinition(
Construct.Relationship, relationshipType);
if (relationshipTypeDef != null) {
List<String> valid_target_types =
(List<String>) relationshipTypeDef.get(VALID_TARGET_TYPES);
if (valid_target_types != null) {
boolean found = false;
for (String target_type : valid_target_types) {
if (catalog.isDerivedFrom(
Construct.Capability, capabilityType, target_type)) {
theContext.addError("Capability type: " + capabilityType + " not compatible with any of the valid_target_types " + valid_target_types + " provided in the definition of relationship type " + relationshipType, null);
//relationship declares the capabilityType in its valid_target_type set
//in A.6.9 'Relationship Type' the spec does not indicate how inheritance
//is to be applied to the valid_target_type spec: cumulative, overwrites,
//so we treat it as an overwrite.
1287 //topology_template_definition and sub-rules
//entry point for the topology_template section: iterates its entries and
//hands each key/value to checks(), which presumably dispatches to the
//matching @Checks-annotated handler (e.g. check_inputs, check_policies).
@Checks(path = "/topology_template")
protected void check_topology_template(
Map theDef, CheckContext theContext) {

theContext.enter("topology_template");

for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator();
Map.Entry<String, Object> e = ri.next();
checks(e.getKey(), e.getValue(), theContext);
* Once the syntax of the imports section is validated, parse/validate/catalog all the imported template information
1306 @Checks(path = "/imports")
1307 protected void check_imports(List theImports, CheckContext theContext) {
1308 theContext.enter("imports");
1310 for (ListIterator li = theImports.listIterator(); li.hasNext(); ) {
1311 Object importEntry = li.next();
1312 Object importFile = ((Map) mapEntry(importEntry).getValue()).get("file");
1315 tgt = catalog.getTarget((URI) importFile);
1316 } catch (ClassCastException ccx) {
1317 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import is {}. Exception {}", importFile, ccx);
1320 if (tgt == null || tgt.getReport().hasErrors()) {
1321 //import failed parsing or validation, we skip it
1325 //import should have been fully processed by now ???
1326 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "Processing import {}.", tgt);
1334 @Checks(path = "/topology_template/substitution_mappings")
1335 protected void check_substitution_mappings(Map<String, Object> theSub,
1336 CheckContext theContext) {
1337 theContext.enter("substitution_mappings");
1340 String type = (String) theSub.get("node_type");
1341 if (!checkTypeReference(Construct.Node, theContext, type)) {
1342 theContext.addError("Unknown node type: " + type + "", null);
1343 return; //not much to go on with
1346 Map<String, List> capabilities = (Map<String, List>) theSub.get(CAPABILITIES);
1347 if (null != capabilities) {
1348 for (Map.Entry<String, List> ce : capabilities.entrySet()) {
1349 //the key must be a capability of the type
1350 if (null == findTypeFacetByName(Construct.Node, type,
1351 Facet.capabilities, ce.getKey())) {
1352 theContext.addError("Unknown node type capability: " + ce.getKey() + ", type " + type, null);
1354 //the value is a 2 element list: first is a local node,
1355 //second is the name of one of its capabilities
1356 List targetList = ce.getValue();
1357 if (targetList.size() != 2) {
1358 theContext.addError("Invalid capability mapping: " + target + ", expecting 2 elements", null);
1362 String targetNode = (String) targetList.get(0);
1363 String targetCapability = (String) targetList.get(1);
1365 Map<String, Object> targetNodeDef = (Map<String, Object>)
1366 this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode);
1367 if (null == targetNodeDef) {
1368 theContext.addError("Invalid capability mapping node template: " + targetNode, null);
1372 String targetNodeType = (String) targetNodeDef.get("type");
1373 if (null == findTypeFacetByName(Construct.Node, targetNodeType,
1374 Facet.capabilities, targetCapability)) {
1375 theContext.addError("Invalid capability mapping capability: " + targetCapability + ". No such capability found for node template " + targetNode + ", of type " + targetNodeType, null);
1380 Map<String, List> requirements = (Map<String, List>) theSub.get(REQUIREMENTS);
1381 if (null != requirements) {
1382 for (Map.Entry<String, List> re : requirements.entrySet()) {
1383 //the key must be a requirement of the type
1384 if (null == findNodeTypeRequirementByName(type, re.getKey())) {
1385 theContext.addError("Unknown node type requirement: " + re.getKey() + ", type " + type, null);
1388 List targetList = re.getValue();
1389 if (targetList.size() != 2) {
1390 theContext.addError("Invalid requirement mapping: " + targetList + ", expecting 2 elements", null);
1394 String targetNode = (String) targetList.get(0);
1395 String targetRequirement = (String) targetList.get(1);
1397 Map<String, Object> targetNodeDef = (Map<String, Object>)
1398 this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode);
1399 if (null == targetNodeDef) {
1400 theContext.addError("Invalid requirement mapping node template: " + targetNode, null);
1404 String targetNodeType = (String) targetNodeDef.get("type");
1405 if (null == findNodeTypeRequirementByName(targetNodeType, targetRequirement)) {
1406 theContext.addError("Invalid requirement mapping requirement: " + targetRequirement + ". No such requirement found for node template " + targetNode + ", of type " + targetNodeType, null);
//validates the topology_template 'inputs' map: generic definition checks,
//then per-input validation via checkInputDefinition.
@Checks(path = "/topology_template/inputs")
protected void check_inputs(Map<String, Map> theInputs,
CheckContext theContext) {
theContext.enter(INPUTS);

if (!checkDefinition(INPUTS, theInputs, theContext)) {

for (Iterator<Map.Entry<String, Map>> i = theInputs.entrySet().iterator(); i.hasNext(); ) {
Map.Entry<String, Map> e = i.next();
checkInputDefinition(e.getKey(), e.getValue(), theContext);
//validates a single input definition: its data type must be known, and if
//a default is present, that value must validate against the declared type.
private void checkInputDefinition(String theName,
CheckContext theContext) {
theContext.enter(theName);

if (!checkDefinition(theName, theDef, theContext)) {

if (!checkDataType(theDef, theContext)) {

//check default value
Object defaultValue = theDef.get(DEFAULT);
if (defaultValue != null) {
checkDataValuation(defaultValue, theDef, theContext);
1458 @Checks(path = "topology_template/outputs")
1459 protected void check_outputs(Map<String, Map> theOutputs,
1460 CheckContext theContext) {
1461 theContext.enter("outputs");
1464 if (!checkDefinition("outputs", theOutputs, theContext)) {
1468 for (Iterator<Map.Entry<String, Map>> i = theOutputs.entrySet().iterator(); i.hasNext(); ) {
1469 Map.Entry<String, Map> e = i.next();
1470 checkOutputDefinition(e.getKey(), e.getValue(), theContext);
//validates a single output definition; only the generic definition checks
//are applied here (the output's value expression is not yet verified).
private void checkOutputDefinition(String theName,
CheckContext theContext) {
theContext.enter(theName);

checkDefinition(theName, theDef, theContext);
//check the expression
//validates the topology_template 'groups' map: generic definition checks,
//then per-group validation via checkGroupDefinition.
@Checks(path = "/topology_template/groups")
protected void check_groups(Map<String, Map> theGroups,
CheckContext theContext) {
theContext.enter("groups");

if (!checkDefinition("groups", theGroups, theContext)) {

for (Iterator<Map.Entry<String, Map>> i = theGroups.entrySet().iterator(); i.hasNext(); ) {
Map.Entry<String, Map> e = i.next();
checkGroupDefinition(e.getKey(), e.getValue(), theContext);
1508 private void checkGroupDefinition(String theName,
1510 CheckContext theContext) {
1511 theContext.enter(theName);
1513 if (!checkDefinition(theName, theDef, theContext)) {
1517 if (!checkType(Construct.Group, theDef, theContext)) {
1522 Construct.Group, theDef, Facet.properties, theContext)) {
1526 if (theDef.containsKey(TARGETS_CONSTANT)) {
1528 List<String> targetsTypes = (List<String>)
1529 this.catalog.getTypeDefinition(Construct.Group,
1530 (String) theDef.get("type"))
1531 .get(TARGETS_CONSTANT);
1533 List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT);
1534 for (String targetItr : targets) {
1535 if (!this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) {
1536 theContext.addError("The 'targets' entry must contain a reference to a node template, '" + targetItr + "' is not one", null);
1538 if (targetsTypes != null) {
1539 String targetType = (String)
1540 this.catalog.getTemplate(theContext.target(), Construct.Node, targetItr).get("type");
1542 boolean found = false;
1543 for (String type : targetsTypes) {
1544 found = this.catalog
1545 .isDerivedFrom(Construct.Node, targetType, type);
1552 theContext.addError("The 'targets' entry '" + targetItr + "' is not type compatible with any of types specified in policy type targets", null);
//validates the topology_template 'policies' list: each element is a
//single-entry map (policy name -> definition) handed to
//checkPolicyDefinition.
@Checks(path = "/topology_template/policies")
protected void check_policies(List<Map<String, Map>> thePolicies,
CheckContext theContext) {
theContext.enter("policies");

if (!checkDefinition("policies", thePolicies, theContext)) {

for (Map<String, Map> policy : thePolicies) {
//each list element must hold exactly one name->definition pair
assert policy.size() == 1;
Map.Entry<String, Map> e = policy.entrySet().iterator().next();
checkPolicyDefinition(e.getKey(), e.getValue(), theContext);
1583 private void checkPolicyDefinition(String theName,
1585 CheckContext theContext) {
1586 theContext.enter(theName);
1588 if (!checkDefinition(theName, theDef, theContext)) {
1592 if (!checkType(Construct.Policy, theDef, theContext)) {
1597 Construct.Policy, theDef, Facet.properties, theContext)) {
1601 //targets: must point to node or group templates (that are of a type
1602 //specified in the policy type definition, if targets were specified
1604 if (theDef.containsKey(TARGETS_CONSTANT)) {
1605 List<String> targetsTypes = (List<String>)
1606 this.catalog.getTypeDefinition(Construct.Policy,
1607 (String) theDef.get("type"))
1608 .get(TARGETS_CONSTANT);
1610 List<String> targets = (List<String>) theDef.get(TARGETS_CONSTANT);
1611 for (String targetItr : targets) {
1612 Construct targetConstruct = null;
1614 if (this.catalog.hasTemplate(theContext.target(), Construct.Group, targetItr)) {
1615 targetConstruct = Construct.Group;
1616 } else if (this.catalog.hasTemplate(theContext.target(), Construct.Node, targetItr)) {
1617 targetConstruct = Construct.Node;
1619 theContext.addError("The 'targets' entry must contain a reference to a node template or group template, '" + target + IS_NONE_OF_THOSE, null);
1622 if (targetConstruct != null &&
1623 targetsTypes != null) {
1624 //get the target type and make sure is compatible with the types
1625 //indicated in the type spec
1626 String targetType = (String)
1627 this.catalog.getTemplate(theContext.target(), targetConstruct, targetItr).get("type");
1629 boolean found = false;
1630 for (String type : targetsTypes) {
1631 found = this.catalog
1632 .isDerivedFrom(targetConstruct, targetType, type);
1639 theContext.addError("The 'targets' " + targetConstruct + " entry '" + targetItr + "' is not type compatible with any of types specified in policy type targets", null);
//validates the topology_template 'node_templates' map: generic definition
//checks, then per-template validation via checkNodeTemplateDefinition.
@Checks(path = "/topology_template/node_templates")
protected void check_node_templates(Map<String, Map> theTemplates,
CheckContext theContext) {
theContext.enter("node_templates");
if (!checkDefinition("node_templates", theTemplates, theContext)) {

for (Iterator<Map.Entry<String, Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) {
Map.Entry<String, Map> e = i.next();
checkNodeTemplateDefinition(e.getKey(), e.getValue(), theContext);
//validates a single node template: known node type, optional 'copy'
//reference, property/attribute facets scoped to the node type, then the
//requirements/capabilities/interfaces/artifacts sub-sections.
private void checkNodeTemplateDefinition(String theName,
CheckContext theContext) {
theContext.enter(theName, Construct.Node);

if (!checkDefinition(theName, theNode, theContext)) {

if (!checkType(Construct.Node, theNode, theContext)) {

//'copy' must reference another node template in this target
String copy = (String) theNode.get("copy");
if (!checkTemplateReference(Construct.Node, theContext, copy)) {
theContext.addError("The 'copy' reference " + copy + " does not point to a known node template", null);
//the 'copy' node specification should be used to provide 'defaults'
//for this specification

/* check that we operate on properties and attributes within the scope of
the specified node type */
Construct.Node, /*theName,*/theNode, Facet.properties, theContext)) {

Construct.Node, /*theName,*/theNode, Facet.attributes, theContext)) {

//requirement assignment seq
if (theNode.containsKey(REQUIREMENTS)) {
checkRequirementsAssignmentDefinition(
(List<Map>) theNode.get(REQUIREMENTS), theContext);

//capability assignment map: subject to augmentation
if (theNode.containsKey(CAPABILITIES)) {
checkCapabilitiesAssignmentDefinition(
(Map<String, Map>) theNode.get(CAPABILITIES), theContext);

if (theNode.containsKey(INTERFACES)) {
checkTemplateInterfacesDefinition(
(Map<String, Map>) theNode.get(INTERFACES), theContext);

//artifacts: artifacts do not have different definition forms/syntax
//depending on the context (type or template) but they are still subject
if (theNode.containsKey(ARTIFACTS)) {
check_template_artifacts_definition(
(Map<String, Object>) theNode.get(ARTIFACTS), theContext);

/* node_filter: the context to which the node filter is applied is very
* wide here as opposed to the node filter specification in a requirement
* assignment which has a more strict context (target node/capability are
* We could check that there are nodes in this template having the
* properties/capabilities specified in this filter, i.e. the filter has
* a chance to succeed.
//validates the topology_template 'relationship_templates' map, delegating
//each entry to checkRelationshipTemplateDefinition.
@Checks(path = "/topology_template/relationship_templates")
protected void check_relationship_templates(Map theTemplates,
CheckContext theContext) {
theContext.enter("relationship_templates");

for (Iterator<Map.Entry<String, Map>> i = theTemplates.entrySet().iterator(); i.hasNext(); ) {
Map.Entry<String, Map> e = i.next();
checkRelationshipTemplateDefinition(e.getKey(), e.getValue(), theContext);
//validates a single relationship template: known relationship type, and
//property/attribute facets scoped to that type.
private void checkRelationshipTemplateDefinition(
Map theRelationship,
CheckContext theContext) {
theContext.enter(theName, Construct.Relationship);

if (!checkDefinition(theName, theRelationship, theContext)) {

if (!checkType(Construct.Relationship, theRelationship, theContext)) {

/* check that we operate on properties and attributes within the scope of
the specified relationship type */
if (!checkFacet(Construct.Relationship, theRelationship,
Facet.properties, theContext)) {

if (!checkFacet(Construct.Relationship, theRelationship,
Facet.attributes, theContext)) {

/* interface definitions
note: augmentation is allowed here so not clear what to check ..
maybe report augmentations if so configured .. */
//requirements and capabilities assignments appear in node templates
//validates the requirement assignments of a node template: every assigned
//requirement name must be declared by the enclosing node template's node
//type; each assignment is then verified against that declaration.
private void checkRequirementsAssignmentDefinition(
List<Map> theRequirements, CheckContext theContext) {
theContext.enter(REQUIREMENTS);

if (!checkDefinition(REQUIREMENTS, theRequirements, theContext)) {

//the node type for the node template enclosing these requirements
String nodeType = (String) catalog.getTemplate(
theContext.target(),
theContext.enclosingConstruct(Construct.Node))

for (Iterator<Map> ri = theRequirements.iterator(); ri.hasNext(); ) {
Map<String, Map> requirement = (Map<String, Map>) ri.next();

Iterator<Map.Entry<String, Map>> rai = requirement.entrySet().iterator();

Map.Entry<String, Map> requirementEntry = rai.next();
//each list element must hold exactly one name->assignment pair
assert !rai.hasNext();

String requirementName = requirementEntry.getKey();
Map requirementDef = findNodeTypeRequirementByName(
nodeType, requirementName);

if (requirementDef == null) {
theContext.addError("No requirement " + requirementName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);

checkRequirementAssignmentDefinition(
requirementName, requirementEntry.getValue(), requirementDef, theContext);
//validates one requirement assignment against its requirement definition:
//  - 'node': may name a node template or a node type; either way it must
//    be compatible with the node type in the definition
//  - 'capability': may name a capability type, or a capability of the
//    target node template; must be compatible with the definition
//  - 'relationship': assignment compatibility (not fully implemented here)
//  - 'node_filter': property and capability filters must fall within the
//    scope of the target node (type), navigated via JXPath
private void checkRequirementAssignmentDefinition(
String theRequirementName,
CheckContext theContext) {
theContext//.enter("requirement_assignment")
.enter(theRequirementName, Construct.Requirement);

//grab the node type definition to verify compatibility

boolean targetNodeIsTemplate = false;
String targetNode = (String) theAssignment.get("node");
if (targetNode == null) {
//no 'node' in the assignment: fall back to the definition's node
targetNode = (String) theDefinition.get("node");
//targetNodeIsTemplate stays false, targetNode must be a type
//the value must be a node template or a node type
targetNodeIsTemplate = isTemplateReference(
Construct.Node, theContext, targetNode);
if ((!targetNodeIsTemplate) && (!isTypeReference(Construct.Node, targetNode))){
theContext.addError("The 'node' entry must contain a reference to a node template or node type, '" + targetNode + IS_NONE_OF_THOSE, null);

String targetNodeDef = (String) theDefinition.get("node");
if (targetNodeDef != null && targetNode != null) {
if (targetNodeIsTemplate) {
//if the target is node template, it must be compatible with the
//node type specification in the requirement defintion
String targetNodeType = (String)
catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type");
if (!catalog.isDerivedFrom(
Construct.Node, targetNodeType, targetNodeDef)) {
theContext.addError("The required target node type '" + targetNodeType + "' of target node " + targetNode + " is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null);
//if the target is a node type it must be compatible (= or derived
//from) with the node type specification in the requirement definition
if (!catalog.isDerivedFrom(
Construct.Node, targetNode, targetNodeDef)) {
theContext.addError("The required target node type '" + targetNode + "' is not compatible with the target node type found in the requirement definition: " + targetNodeDef, null);

//resolve the effective target node type (template -> its 'type')
String targetNodeType = targetNodeIsTemplate ?
(String) catalog.getTemplate(theContext.target(), Construct.Node, targetNode).get("type") :

//capability assignment
boolean targetCapabilityIsType = false;
String targetCapability = (String) theAssignment.get(CAPABILITY);
if (targetCapability == null) {
targetCapability = (String) theDefinition.get(CAPABILITY);
//in a requirement definition the target capability can only be a
//capability type (and not a capability name within some target node
targetCapabilityIsType = true;
targetCapabilityIsType = isTypeReference(Construct.Capability, targetCapability);

//check compatibility with the target compatibility type specified
//in the requirement definition, if any
String targetCapabilityDef = (String) theDefinition.get(CAPABILITY);
if (targetCapabilityDef != null && targetCapability != null) {
if (targetCapabilityIsType) {
if (!catalog.isDerivedFrom(
Construct.Capability, targetCapability, targetCapabilityDef)) {
theContext.addError("The required target capability type '" + targetCapability + "' is not compatible with the target capability type found in the requirement definition: " + targetCapabilityDef, null);
//the capability is from a target node. Find its definition and
//check that its type is compatible with the capability type
//from the requirement definition

//check target capability compatibility with target node
if (targetNode == null) {
theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', which was not specified", null);
if (!targetNodeIsTemplate) {
theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', but there you specified a node type", null);
//check that the targetNode (its type) indeed has the
Map<String, Object> targetNodeCapabilityDef =
findTypeFacetByName(
Construct.Node, targetNodeType,
Facet.capabilities, targetCapability);
if (targetNodeCapabilityDef == null) {
theContext.addError("No capability '" + targetCapability + "' was specified in the node " + targetNode + " of type " + targetNodeType, null);

String targetNodeCapabilityType = (String) targetNodeCapabilityDef.get("type");

if (!catalog.isDerivedFrom(Construct.Capability,
targetNodeCapabilityType,
targetCapabilityDef)) {
theContext.addError("The required target capability type '" + targetCapabilityDef + "' is not compatible with the target capability type found in the target node type capability definition : " + targetNodeCapabilityType + ", targetNode " + targetNode + ", capability name " + targetCapability, null);

//relationship assignment
Map targetRelationship = (Map) theAssignment.get("relationship");
if (targetRelationship != null) {
//this has to be compatible with the relationship with the same name
//from the node type

//node_filter; used jxpath to simplify the navigation somewhat
//this is too cryptic
JXPathContext jxPath = JXPathContext.newContext(theAssignment);
jxPath.setLenient(true);

List<Map> propertiesFilter =
(List<Map>) jxPath.getValue("/node_filter/properties");
if (propertiesFilter != null) {
for (Map propertyFilter : propertiesFilter) {
if (targetNode != null) {
//if we have a target node or node template then it must have
//have these properties
for (Object propertyName : propertyFilter.keySet()) {
if (null == findTypeFacetByName(Construct.Node,
propertyName.toString())) {
theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target node " + targetNode + " does not have such a property", null);

List<Map> capabilitiesFilter =
(List<Map>) jxPath.getValue("node_filter/capabilities");
if (capabilitiesFilter != null) {
for (Map capabilityFilterDef : capabilitiesFilter) {
//each capability filter is a one-entry map: name/type -> filter spec
assert capabilityFilterDef.size() == 1;
Map.Entry<String, Map> capabilityFilterEntry =
(Map.Entry<String, Map>) capabilityFilterDef.entrySet().iterator().next();
String targetFilterCapability = capabilityFilterEntry.getKey();
Map<String, Object> targetFilterCapabilityDef = null;

//if we have a targetNode capabilityName must be a capability of
//that node (type); or it can be simply capability type (but the node
//must have a capability of that type)

String targetFilterCapabilityType = null;
if (targetNode != null) {
targetFilterCapabilityDef =
findTypeFacetByName(Construct.Node, targetNodeType,
Facet.capabilities, targetFilterCapability);
if (targetFilterCapabilityDef != null) {
targetFilterCapabilityType =
(String) targetFilterCapabilityDef/*.values().iterator().next()*/.get("type");
Map<String, Map> targetFilterCapabilities =
findTypeFacetByType(Construct.Node, targetNodeType,
Facet.capabilities, targetFilterCapability);

if (!targetFilterCapabilities.isEmpty()) {
if (targetFilterCapabilities.size() > 1) {
errLogger.log(LogLevel.WARN, this.getClass().getName(), "checkRequirementAssignmentDefinition: filter check, target node type '{}' has more than one capability of type '{}', not supported", targetNodeType, targetFilterCapability);
//pick the first entry, it represents a capability of the required type
Map.Entry<String, Map> capabilityEntry = targetFilterCapabilities.entrySet().iterator().next();
targetFilterCapabilityDef = Collections.singletonMap(capabilityEntry.getKey(),
capabilityEntry.getValue());
targetFilterCapabilityType = targetFilterCapability;
//no node (type) specified, it can be a straight capability type
targetFilterCapabilityDef = catalog.getTypeDefinition(
Construct.Capability, targetFilterCapability);
//here comes the odd part: it can still be a just a name in which
//case we should look at the requirement definition, see which
//capability (type) it indicates
assert targetCapabilityIsType; //cannot be otherwise, we'd need a node
targetFilterCapabilityDef = catalog.getTypeDefinition(
Construct.Capability, targetCapability);
targetFilterCapabilityType = targetCapability;

if (targetFilterCapabilityDef == null) {
theContext.addError("Capability (name or type) " + targetFilterCapability + " is invalid: not a known capability (type) " +
((targetNodeType != null) ? (" of node type" + targetNodeType) : ""), null);

for (Map propertyFilter :
(List<Map>) jxPath.getValue("/node_filter/capabilities/" + targetFilterCapability + "/properties")) {
//check that the properties are in the scope of the
//capability definition
for (Object propertyName : propertyFilter.keySet()) {
if (null == findTypeFacetByName(Construct.Capability,
propertyName.toString())) {
theContext.addError("The capability filter " + targetFilterCapability + " property " + propertyName + " is invalid: target capability " + targetFilterCapabilityType + " does not have such a property", null);
//validates the capability assignments of a node template: each assigned
//capability name must be declared by the enclosing template's node type.
private void checkCapabilitiesAssignmentDefinition(
Map<String, Map> theCapabilities, CheckContext theContext) {
theContext.enter(CAPABILITIES);

if (!checkDefinition(CAPABILITIES, theCapabilities, theContext)) {

//the node type for the node template enclosing these requirements
String nodeType = (String) catalog.getTemplate(
theContext.target(),
theContext.enclosingConstruct(Construct.Node))

for (Iterator<Map.Entry<String, Map>> ci =
theCapabilities.entrySet().iterator();

Map.Entry<String, Map> ce = ci.next();

String capabilityName = ce.getKey();
Map capabilityDef = findTypeFacetByName(Construct.Node, nodeType,
Facet.capabilities, capabilityName);
if (capabilityDef == null) {
theContext.addError("No capability " + capabilityName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);

checkCapabilityAssignmentDefinition(
capabilityName, ce.getValue(), capabilityDef, theContext);
//validates one capability assignment against its definition: the assigned
//property and attribute values must fall within the scope of the declared
//capability type.
private void checkCapabilityAssignmentDefinition(
String theCapabilityName,
CheckContext theContext) {

theContext.enter(theCapabilityName, Construct.Capability);

String capabilityType = (String) theDefinition.get("type");
//list of property and attributes assignments
checkFacet(Construct.Capability, theAssignment, capabilityType,
Facet.properties, theContext);
checkFacet(Construct.Capability, theAssignment, capabilityType,
Facet.attributes, theContext);
//validates the interface assignments of a node template: each assigned
//interface name must be declared by the enclosing template's node type.
private void checkTemplateInterfacesDefinition(
Map<String, Map> theInterfaces,
CheckContext theContext) {
theContext.enter(INTERFACES);

if (!checkDefinition(INTERFACES, theInterfaces, theContext)) {

//the node type for the node template enclosing these requirements
String nodeType = (String) catalog.getTemplate(
theContext.target(),
theContext.enclosingConstruct(Construct.Node))

for (Iterator<Map.Entry<String, Map>> ii =
theInterfaces.entrySet().iterator();

Map.Entry<String, Map> ie = ii.next();

String interfaceName = ie.getKey();
Map interfaceDef = findTypeFacetByName(Construct.Node, nodeType,
Facet.interfaces, interfaceName);

if (interfaceDef == null) {
/* this is subject to augmentation: this could be a warning but not an error */
theContext.addError("No interface " + interfaceName + WAS_DEFINED_FOR_THE_NODE_TYPE + nodeType, null);

checkTemplateInterfaceDefinition(
interfaceName, ie.getValue(), interfaceDef, theContext);
//validates one interface assignment against its interface definition
//(currently only the assignment of the common inputs facet is checked).
private void checkTemplateInterfaceDefinition(
String theInterfaceName,
CheckContext theContext) {

theContext.enter(theInterfaceName, Construct.Interface);

//check the assignment of the common inputs
checkFacet(Construct.Interface,
(String) theDefinition.get("type"),
/* @Checks-registered handler for the topology template's 'artifacts' section;
 * dispatched by checks() via the path-keyed handler table. Only the context
 * enter is visible here — the artifact checking body is on elided lines. */
2176 @Checks(path = "/topology_template/artifacts")
2177 protected void check_template_artifacts_definition(
2178 Map<String, Object> theDefinition,
2179 CheckContext theContext) {
2180 theContext.enter(ARTIFACTS);
2184 //generic checking actions, not related to validation rules
2186 /* will check the validity of the type specification for any construct containing a 'type' entry */
/* Verifies that theSpec carries a 'type' entry and that the named type is known
 * to the catalog for the given construct category; reports an error (and,
 * presumably, returns false — return lines are elided) otherwise. */
2187 private boolean checkType(Construct theCategory, Map theSpec, CheckContext theContext) {
2188 String type = (String) theSpec.get("type");
2190 theContext.addError("Missing type specification", null);
2194 if (!catalog.hasType(theCategory, type)) {
2195 theContext.addError(UNKNOWN + theCategory + " type: " + type, null);
2203 * a known type: predefined or user-defined
2204 * a collection (list or map) and then check that the entry_schema points to one of the first two cases (is that it?)
/* Checks a data type specification: first the 'type' entry itself (via checkType),
 * then, for the collection types 'list' and 'map', that the 'entry_schema'
 * references a known data type. */
2206 private boolean checkDataType(Map theSpec, CheckContext theContext) {
2208 if (!checkType(Construct.Data, theSpec, theContext)) {
2212 String type = (String) theSpec.get("type");
2213 if (/*isCollectionType(type)*/
2214 "list".equals(type) || "map".equals(type)) {
2215 Map entrySchema = (Map) theSpec.get("entry_schema");
2216 if (entrySchema == null) {
2217 //maybe issue a warning ?? or is 'string' the default??
2221 if (!catalog.hasType(Construct.Data, (String) entrySchema.get("type"))) {
2222 theContext.addError("Unknown entry_schema type: " + entrySchema, null);
2229 /* Check that a particular facet (properties, attributes) of a construct type
2230 * (node type, capability type, etc) is correctly (consistently) defined
2231 * across a type hierarchy
/* Walks the type hierarchy of theTypeName and, for every supertype that declares
 * the same facet (properties, attributes, ...), verifies that any entry re-defined
 * by this type keeps a type derived from (or equal to) the supertype's declared
 * type. Re-definitions with an incompatible type are reported as errors. */
2233 private boolean checkTypeConstructFacet(Construct theConstruct,
2237 CheckContext theContext) {
// this type's own facet entries (may legitimately be absent — elided guard)
2238 Map<String, Map> defs =
2239 (Map<String, Map>) theTypeSpec.get(theFacet.name());
2246 //given that the type was cataloged there will be at least one entry
2247 Iterator<Map.Entry<String, Map>> i =
2248 catalog.hierarchy(theConstruct, theTypeName);
2250 theContext.addError(
2251 "The type " + theTypeName + " needs to be cataloged before attempting 'checkTypeConstruct'", null);
2254 i.next(); //skip self
2255 while (i.hasNext()) {
2256 Map.Entry<String, Map> e = i.next();
2257 Map<String, Map> superDefs = (Map<String, Map>) e.getValue()
2258 .get(theFacet.name());
2259 if (null == superDefs) {
2262 //this computes entries that appear on both collections but with different values, i.e. the re-defined properties
2263 Map<String, MapDifference.ValueDifference<Map>> diff = Maps.difference(defs, superDefs).entriesDiffering();
2265 for (Iterator<Map.Entry<String, MapDifference.ValueDifference<Map>>> di = diff.entrySet().iterator(); di.hasNext(); ) {
2266 Map.Entry<String, MapDifference.ValueDifference<Map>> de = di.next();
2267 MapDifference.ValueDifference<Map> dediff = de.getValue();
2268 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {}: {} has been re-defined between the {} types {} and {}", theConstruct, theFacet, de.getKey(), theConstruct, e.getKey(), theTypeName);
2269 //for now we just check that the type is consistently re-declared
// leftValue = this type's declaration, rightValue = the supertype's
2270 if (!this.catalog.isDerivedFrom(theFacet.construct(),
2271 (String) dediff.leftValue().get("type"),
2272 (String) dediff.rightValue().get("type"))) {
2273 theContext.addError(
2274 theConstruct + TYPE + theFacet + ", redefiniton changed its type: " + de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName + " in an incompatible manner", null);
2284 * Checks the validity of a certain facet of a construct
2285 * (properties of a node) across a type hierarchy.
2286 * For now the check is limited to verifying that a facet was declared
2287 * somewhere in the construct type hierarchy (a node template property has
2288 * been declared in the node type hierarchy).
2290 * 2 versions with the more generic allowing the specification of the type
2291 * to be done explicitly.
/* Convenience overload: delegates to the full checkFacet with a null explicit
 * spec type, causing the type to be read from theSpec's own 'type' entry. */
2293 private boolean checkFacet(Construct theConstruct,
2296 CheckContext theContext) {
2297 return checkFacet(theConstruct, theSpec, null, theFacet, theContext);
2301 * We walk the hierarchy and verify the assignment of a property with respect to its definition.
2302 * We also collect the names of those properties defined as required but for which no assignment was provided.
/* Verifies every facet assignment in theSpec (e.g. a template's properties)
 * against the declarations found while walking the type hierarchy of
 * theSpecType. Assignments never matched by any type in the hierarchy are
 * reported as unknown; required declarations with no assignment and no default
 * are collected in 'missed' and reported at the end. */
2304 private boolean checkFacet(Construct theConstruct,
2308 CheckContext theContext) {
2310 Map<String, Map> defs = (Map<String, Map>) theSpec.get(theFacet.name());
// take a mutable copy — entries are removed as they are matched below
2314 defs = Maps.newHashMap(defs); //
2317 if (theSpecType == null) {
2318 theSpecType = (String) theSpec.get("type");
2320 if (theSpecType == null) {
2321 theContext.addError("No specification type available", null);
2325 Map<String, Byte> missed = new HashMap<>(); //keeps track of the missing required properties, the value is
2326 //false if a default was found along the hierarchy
2327 Iterator<Map.Entry<String, Map>> i =
2328 catalog.hierarchy(theConstruct, theSpecType);
// stop early once every assignment has been matched to a declaration
2329 while (i.hasNext() && !defs.isEmpty()) {
2330 Map.Entry<String, Map> type = i.next();
2332 Map<String, Map> typeDefs = (Map<String, Map>) type.getValue()
2333 .get(theFacet.name());
2334 if (null == typeDefs) {
2338 MapDifference<String, Map> diff = Maps.difference(defs, typeDefs);
2340 //this are the ones this type and the spec have in common (same key,
2342 Map<String, MapDifference.ValueDifference<Map>> facetDefs =
2343 diff.entriesDiffering();
2344 //TODO: this assumes the definition of the facet is not cumulative, i.e.
2345 //subtypes 'add' something to the definition provided by the super-types
2346 //it considers the most specialized definition stands on its own
2347 for (MapDifference.ValueDifference<Map> valdef : facetDefs.values()) {
// leftValue = the assignment, rightValue = the declaration it is checked against
2348 checkDataValuation(valdef.leftValue(), valdef.rightValue(), theContext);
2351 //remove from properties all those that appear in this type: unfortunately this returns an unmodifiable map ..
2352 defs = Maps.newHashMap(diff.entriesOnlyOnLeft());
2355 if (!defs.isEmpty()) {
2356 theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + defs, null);
2360 if (!missed.isEmpty()) {
// byte value 1 marks required entries for which no default was found anywhere
2364 .filter(e -> e.getValue().byteValue() == (byte) 1)
2365 .map(e -> e.getKey())
2366 .collect(Collectors.toList());
2367 if (!missedNames.isEmpty()) {
2368 theContext.addError(theConstruct + " " + theFacet + " missing required values for: " + missedNames, null);
2376 /* Augmentation occurs in cases such as the declaration of capabilities within a node type.
2377 * In such cases the construct facets (the capability's properties) can redefine (augment) the
2378 * specification found in the construct type.
/* Convenience overload: delegates to the full checkFacetAugmentation with a null
 * explicit spec type (the type is then read from theSpec's 'type' entry). */
2380 private boolean checkFacetAugmentation(Construct theConstruct,
2383 CheckContext theContext) {
2384 return checkFacetAugmentation(theConstruct, theSpec, null, theFacet, theContext);
/* Verifies facet augmentations (e.g. a capability's property re-declarations
 * inside a node type): every augmented entry must have been declared by the
 * construct type, must keep the exact same 'type', and any 'default' value it
 * provides is validated against the entry's definition. */
2387 private boolean checkFacetAugmentation(Construct theConstruct,
2391 CheckContext theContext) {
2393 Map<String, Map> augs = (Map<String, Map>) theSpec.get(theFacet.name());
2399 if (theSpecType == null) {
2400 theSpecType = (String) theSpec.get("type");
2402 if (theSpecType == null) {
2403 theContext.addError("No specification type available", null);
2407 for (Iterator<Map.Entry<String, Map>> ai = augs.entrySet().iterator(); ai.hasNext(); ) {
2408 Map.Entry<String, Map> ae = ai.next();
2410 //make sure it was declared by the type
2411 Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey());
2412 if (facetDef == null) {
2413 theContext.addError(UNKNOWN + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + ae.getKey(), null);
2418 //check the compatibility of the augmentation: only the type cannot be changed
2419 //can the type be changed in a compatible manner ??
// strict equality here, unlike checkTypeConstructFacet which accepts derived types
2420 if (!facetDef.get("type").equals(ae.getValue().get("type"))) {
2421 theContext.addError(theConstruct + " " + theFacet + " " + ae.getKey() + " has a different type than its definition: " + ae.getValue().get("type") + " instead of " + facetDef.get("type"), null);
2426 //check any valuation (here just defaults)
2427 Object defaultValue = ae.getValue().get(DEFAULT);
2428 if (defaultValue != null) {
2429 checkDataValuation(defaultValue, ae.getValue(), theContext);
/* Catalogs every type in theTypes under the given construct; the result
 * accumulates (AND) the per-type outcome of catalogType. */
2436 private boolean catalogTypes(Construct theConstruct, Map<String, Map> theTypes, CheckContext theContext) {
2439 for (Map.Entry<String, Map> typeEntry : theTypes.entrySet()) {
2440 res &= catalogType(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext);
/* Adds one named type definition to the catalog. Re-declaration of an existing
 * name is an error, as is a 'derived_from' referencing a supertype not yet
 * cataloged (declaration order matters within a document). */
2446 private boolean catalogType(Construct theConstruct,
2449 CheckContext theContext) {
2451 if (!catalog.addType(theConstruct, theName, theDef)) {
2452 theContext.addError(theConstruct + TYPE + theName + " re-declaration", null);
2455 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "{} type {} has been cataloged", theConstruct, theName);
2457 String parentType = (String) theDef.get("derived_from");
2458 if (parentType != null && !catalog.hasType(theConstruct, parentType)) {
2459 theContext.addError(
2460 theConstruct + TYPE + theName + " indicates a supertype that has not (yet) been declared: " + parentType, null);
/* Verifies that each given type name resolves to a cataloged type of the given
 * construct; unknown references are reported as errors. */
2466 private boolean checkTypeReference(Construct theConstruct,
2467 CheckContext theContext,
2468 String... theTypeNames) {
2470 for (String typeName : theTypeNames) {
2471 if (!isTypeReference(theConstruct, typeName)) {
2472 theContext.addError("Reference to " + theConstruct + " type '" + typeName + "' points to unknown type", null);
/* True when the catalog knows a type of the given construct under this name. */
2479 private boolean isTypeReference(Construct theConstruct,
2480 String theTypeName) {
2481 return this.catalog.hasType(theConstruct, theTypeName);
2484 /* node or relationship templates */
/* Verifies that each given template name (node or relationship template) is
 * cataloged for the current target; unknown references are reported. */
2485 private boolean checkTemplateReference(Construct theConstruct,
2486 CheckContext theContext,
2487 String... theTemplateNames) {
2489 for (String templateName : theTemplateNames) {
2490 if (!isTemplateReference(theConstruct, theContext, templateName)) {
2491 theContext.addError("Reference to " + theConstruct + " template '" + templateName + "' points to unknown template", null);
/* True when the catalog has a template of the given construct, under this name,
 * scoped to the context's current target document. */
2498 private boolean isTemplateReference(Construct theConstruct,
2499 CheckContext theContext,
2500 String theTemplateName) {
2501 return this.catalog.hasTemplate(theContext.target(), theConstruct, theTemplateName);
2505 * For inputs/properties/attributes/(parameters). It is the caller's
2506 * responsibility to provide the value (from a 'default', inlined, ..)
2508 * @param theDef the definition of the given construct/facet as it appears in
2509 * its enclosing type definition.
/* Validates a value (or function expression) against its facet definition.
 * A recognized TOSCA function delegates to the function's evaluator; otherwise
 * the value is run through the type's value evaluator and then its constraints
 * evaluator. Unknown types are reported as non-evaluable expressions. */
2512 private boolean checkDataValuation(Object theExpr,
2513 Map<String, ?> theDef,
2514 CheckContext theContext) {
2515 //first check if the expression is a function, if not handle it as a value assignment
2516 Data.Function f = Data.function(theExpr);
2518 return f.evaluator()
2519 .eval(theExpr, theDef, theContext);
2521 Data.Type type = Data.typeByName((String) theDef.get("type"));
2523 Data.Evaluator evaluator;
2525 evaluator = type.evaluator();
2526 if (evaluator == null) {
// no evaluator is acceptable: log at debug and skip value validation
2527 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No value evaluator available for type {}", type);
2529 if ((theExpr != null) && (!evaluator.eval(theExpr, theDef, theContext))) {
2535 evaluator = type.constraintsEvaluator();
2536 if (evaluator == null) {
2537 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "No constraints evaluator available for type {}", type);
2539 if (theExpr != null) {
2540 if (!evaluator.eval(theExpr, theDef, theContext)) {
2544 //should have a null value validator
2550 theContext.addError("Expression " + theExpr + " of " + theDef + " could not be evaluated", null);
2557 * Given the type of a certain construct (node type for example), look up
2558 * in one of its facets (properties, capabilities, ..) for one of the given
2559 * facet type (if looking in property, one of the given data type).
2561 * @return a map of all facets of the given type, will be empty to signal
2564 * Should we look for a facet construct of a compatible type: any type derived
2565 * from the given facet's construct type??
/* Collects, across the whole type hierarchy of theTypeName, all entries of the
 * given facet whose declared type is equal to or derived from theFacetType.
 * putIfAbsent keeps the most specialized declaration (first one encountered
 * while walking from the type towards its roots); returns an empty map when
 * nothing matches. */
2567 private Map<String, Map>
2568 findTypeFacetByType(Construct theTypeConstruct,
2571 String theFacetType) {
2573 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType {}, {}: {} {}", theTypeName, theTypeConstruct, theFacetType, theFacet);
2574 Map<String, Map> res = new HashMap<>();
2575 Iterator<Map.Entry<String, Map>> i =
2576 catalog.hierarchy(theTypeConstruct, theTypeName);
2577 while (i.hasNext()) {
2578 Map.Entry<String, Map> typeSpec = i.next();
2579 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", theTypeConstruct, typeSpec.getKey());
2580 Map<String, Map> typeFacet =
2581 (Map<String, Map>) typeSpec.getValue().get(theFacet.name());
2582 if (typeFacet == null) {
2585 Iterator<Map.Entry<String, Map>> fi = typeFacet.entrySet().iterator();
2586 while (fi.hasNext()) {
2587 Map.Entry<String, Map> facet = fi.next();
2588 String facetType = (String) facet.getValue().get("type");
2589 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, Checking {} type {}", facet.getKey(), facetType);
2591 //here is the question: do we look for an exact match or ..
2592 //now we check that the type has a capability of a type compatible
2593 //(equal or derived from) the given capability type.
2594 if (catalog.isDerivedFrom(
2595 theFacet.construct(), facetType, theFacetType)) {
2596 res.putIfAbsent(facet.getKey(), facet.getValue());
2600 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByType, found {}", res);
/* Looks up a single facet entry by name, walking the type hierarchy from the
 * given type towards its roots and returning the first (most specialized)
 * declaration found; returns null (on elided lines) when no type declares it. */
2605 private Map<String, Object>
2606 findTypeFacetByName(Construct theTypeConstruct,
2609 String theFacetName) {
2610 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName {} {}", theTypeConstruct, theTypeName);
2611 Iterator<Map.Entry<String, Map>> i =
2612 catalog.hierarchy(theTypeConstruct, theTypeName);
2613 while (i.hasNext()) {
2614 Map.Entry<String, Map> typeSpec = i.next();
2615 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findTypeFacetByName, Checking {} type {}", theTypeConstruct, typeSpec.getKey());
2616 Map<String, Map> typeFacet =
2617 (Map<String, Map>) typeSpec.getValue().get(theFacet.name());
2618 if (typeFacet == null) {
2621 Map<String, Object> facet = typeFacet.get(theFacetName);
2622 if (facet != null) {
2629 /* Requirements are the odd ball as they are structured as a sequence .. */
/* Requirements are the odd ball as they are structured as a sequence (a list of
 * single-entry maps) rather than a map: walk the node type hierarchy and scan
 * each type's requirement list for the first entry keyed by theRequirementName. */
2630 private Map<String, Map> findNodeTypeRequirementByName(
2631 String theNodeType, String theRequirementName) {
2632 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName {}/{}", theNodeType, theRequirementName);
2633 Iterator<Map.Entry<String, Map>> i =
2634 catalog.hierarchy(Construct.Node, theNodeType);
2635 while (i.hasNext()) {
2636 Map.Entry<String, Map> nodeType = i.next();
2637 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "findNodeTypeRequirementByName, Checking node type {}", nodeType.getKey());
2638 List<Map<String, Map>> nodeTypeRequirements =
2639 (List<Map<String, Map>>) nodeType.getValue().get(REQUIREMENTS);
2640 if (nodeTypeRequirements == null) {
2644 for (Map<String, Map> requirement : nodeTypeRequirements) {
2645 Map requirementDef = requirement.get(theRequirementName);
2646 if (requirementDef != null) {
2647 return requirementDef;
2655 * Additional generics checks to be performed on any definition: construct,
2656 * construct types, etc ..
/* Generic sanity check applied to any named definition: it must be present and
 * non-empty; failures are reported under theName and (per the elided lines,
 * presumably) cause a false return so callers can short-circuit. */
2658 public boolean checkDefinition(String theName,
2660 CheckContext theContext) {
2661 if (theDefinition == null) {
2662 theContext.addError("Missing definition for " + theName, null);
2666 if (theDefinition.isEmpty()) {
2667 theContext.addError("Empty definition for " + theName, null);
/* Private overload of checkDefinition for a different definition container type
 * (the parameter declaration is on an elided line); same present-and-non-empty
 * contract and the same error messages as the public variant. */
2674 private boolean checkDefinition(String theName,
2676 CheckContext theContext) {
2677 if (theDefinition == null) {
2678 theContext.addError("Missing definition for " + theName, null);
2682 if (theDefinition.isEmpty()) {
2683 theContext.addError("Empty definition for " + theName, null);
2690 /* plenty of one entry maps around */
/* Unwraps the single entry of a one-entry map (a very common shape in parsed
 * TOSCA yaml). Assumes theMap is a non-empty Map — throws otherwise. */
2691 private Map.Entry mapEntry(Object theMap) {
2692 return (Map.Entry) ((Map) theMap).entrySet().iterator().next();
2696 * Given that we remembered the canonical forms that were needed during
2697 * validation to replace the short forms we can apply them to the target
2699 * We take advantage here of the fact that the context path maintained
2700 * during validation is compatible with (j)xpath, with the exception of
2701 * sequence/array indentation ..
/* Rewrites validation-context sequence indices into jxpath predicates:
 * each match of indexPattern (0-based) becomes a 1-based "[n]" segment,
 * since jxpath collection indexing starts at 1. */
2704 private String patchIndexes(CharSequence thePath) {
2705 Matcher m = indexPattern.matcher(thePath);
2706 StringBuffer path = new StringBuffer();
// index.substring(1) drops the leading delimiter captured by the pattern
2708 String index = m.group();
2709 index = "[" + (Integer.valueOf(index.substring(1)).intValue() + 1) + "]";
2710 m.appendReplacement(path, Matcher.quoteReplacement(index));
2713 return path.toString();
/* Rewrites path elements containing whitespace into jxpath name predicates
 * ("[@name='...']") so the resulting expression stays parseable; elements
 * without whitespace are appended as-is (on elided lines). */
2716 private String patchWhitespaces(String thePath) {
2717 String[] elems = thePath.split("/");
2718 StringBuffer path = new StringBuffer();
2719 for (int i = 0; i < elems.length; i++) {
2720 if (spacePattern.matcher(elems[i]).find()) {
2721 path.append("[@name='")
2729 return path.toString();
/* Applies all remembered canonical forms to the parsed target (no path filter:
 * prefix "/" matches everything, exact-match flag off). No-op when nothing was
 * canonicalized during validation. */
2732 private void applyCanonicals(Object theTarget,
2733 Map<String, Object> theCanonicals) {
2734 if (theCanonicals.isEmpty()) {
2737 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "applying canonicals: {}", theCanonicals);
2738 applyCanonicals(theTarget, theCanonicals, "/", false);
2742 * applies canonicals selectively
/* Selectively applies canonical forms: for every remembered canonical whose
 * validation path starts with thePrefix, the path is patched into a jxpath
 * expression (indices and whitespace) and the canonical value is written into
 * the target via JXPath. Write failures are logged as warnings, not errors. */
2744 private void applyCanonicals(Object theTarget,
2745 Map<String, Object> theCanonicals,
2749 JXPathContext jxPath = JXPathContext.newContext(theTarget);
2750 for (Iterator<Map.Entry<String, Object>> ces =
2751 theCanonicals.entrySet().iterator();
2753 Map.Entry<String, Object> ce = ces.next();
2754 //should we check prefix before or after normalization ??
2755 String path = ce.getKey();
2756 if (path.startsWith(thePrefix)) {
2757 path = patchWhitespaces(
2758 patchIndexes(path));
2760 jxPath.setValue(path, ce.getValue());
2761 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Applied canonical form (prefix '{}') at: {}", thePrefix, path);
2766 } catch (JXPathException jxpx) {
2767 errLogger.log(LogLevel.WARN, this.getClass().getName(), "Failed to apply canonical to {} {}", theTarget, jxpx);
2774 * commons are built-in and supposed to be bulletproof so any error in here
/* Lazily builds the shared catalog of built-in common TOSCA types by running a
 * fresh Checker over the bundled common-type documents. Synchronized on
 * Catalog.class so concurrent callers initialize the singleton only once; any
 * error in the commons is fatal (RuntimeException) since they are expected to
 * be bulletproof. */
2777 private static Catalog commonsCatalog() {
2779 synchronized (Catalog.class) {
// fast path: already initialized by a previous caller
2781 if (commonsCatalogInstance != null) {
2782 return commonsCatalogInstance;
2785 //if other templates are going to be part of the common type system
2786 //add them to this list. order is relevant.
2787 final String[] commons = new String[]{
2788 "tosca/tosca-common-types.yaml"};
2790 Checker commonsChecker;
2792 commonsChecker = new Checker();
// buildCatalog(false) avoids recursing back into commonsCatalog()
2794 for (String common : commons) {
2795 commonsChecker.check(common, buildCatalog(false));
2796 Report commonsReport = commonsChecker.targets().iterator().next().getReport();
2798 if (commonsReport.hasErrors()) {
2799 throw new RuntimeException("Failed to process commons:\n" +
2803 } catch (CheckerException cx) {
2804 throw new RuntimeException("Failed to process commons", cx);
2806 commonsCatalogInstance = commonsChecker.catalog;
2807 return commonsCatalogInstance;
/* Public factory: builds a catalog chained to the shared commons catalog. */
2811 public static Catalog buildCatalog() {
2812 return buildCatalog(true);
/* Builds a catalog, optionally parented on the commons catalog (doCommons=false
 * is used while bootstrapping the commons themselves), and pre-registers every
 * core TOSCA data type with an empty definition. */
2815 private static Catalog buildCatalog(boolean doCommons) {
2817 Catalog catalog = new Catalog(doCommons ? commonsCatalog() : null);
2819 //add core TOSCA types
2820 for (Data.CoreType type : Data.CoreType.class.getEnumConstants()) {
2821 catalog.addType(Construct.Data, type.toString(), Collections.emptyMap());
/* Reflectively looks up a (possibly private) hook method on Checker by name and
 * argument types and invokes it with theArgs. A missing hook is normal (logged
 * at debug, not an error); invocation failures are logged as warnings and
 * swallowed. Returns true iff a hook method with that signature existed. */
2827 private boolean invokeHook(String theHookName,
2828 Class[] theArgTypes,
2829 Object... theArgs) {
2831 Invokable hookHandler = null;
2833 Method m = Checker.class.getDeclaredMethod(
2834 theHookName, theArgTypes);
// hooks are typically protected/private, so bypass access checks
2835 m.setAccessible(true);
2836 hookHandler = Invokable.from(m);
2837 } catch (NoSuchMethodException nsmx) {
2838 //that's ok, not every rule has to have a handler
2839 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "That's ok, not every rule has to have a handler. Method name =", theHookName);
2842 if (hookHandler != null) {
2844 hookHandler.invoke(this, theArgs);
2845 } catch (InvocationTargetException | IllegalAccessException itx) {
2846 errLogger.log(LogLevel.WARN, this.getClass().getName(), "Invocation failed for hook handler {} {}", theHookName, itx);
2847 } catch (Exception x) {
2848 errLogger.log(LogLevel.WARN, this.getClass().getName(), "Hook handler failed {} {}", theHookName, x);
2852 return hookHandler != null;
/* Dispatches a kwalify validation event to an optional per-rule hook named
 * "<rule>_<timing>_validation_handler" (timing is "pre" or "post"); absence of
 * a handler is just logged at debug. */
2855 private void validationHook(String theTiming,
2858 Validator.ValidationContext theContext) {
2860 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "looking up validation handler for {}, {} {}", theRule.getName(), theTiming, theContext.getPath());
2861 if (!invokeHook(theRule.getName() + "_" + theTiming + "_validation_handler",
2862 validationHookArgTypes,
2863 theTarget, theRule, theContext)) {
2864 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no validation handler for {}", theRule.getName() + "_" + theTiming);
/* Dispatches checking for a named element: first to any @Checks-annotated
 * handlers registered for the current context path, then to a reflective
 * "check_<name>" hook tried against each supported argument-type signature.
 * Handler failures are logged as warnings and do not abort checking. */
2870 private void checks(String theName,
2871 CheckContext theContext) {
// handlers are keyed by the full context path, not just the element name
2872 Map<Method, Object> handlers = checks.row(/*theName*/theContext.getPath(theName));
2873 if (handlers != null) {
2874 for (Map.Entry<Method, Object> handler : handlers.entrySet()) {
2875 handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext});
2876 } catch (Exception x) {
2877 errLogger.log(LogLevel.WARN, this.getClass().getName(), "Check {} with {} failed {}", theName, handler.getKey(), x);
2881 boolean hasHook = false;
2882 for (Class[] argTypes : checkHookArgTypes) {
2883 hasHook |= invokeHook("check_" + theName,
2885 theTarget, theContext);
2886 //shouldn't we stop as soon as hasHook is true??
2890 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "no check handler for {}", theName);
/* Dispatches cataloging for a named element to the @Catalogs-annotated handlers
 * registered for the current context path; failures are logged as warnings.
 * Mirrors checks() but without the reflective "check_<name>" fallback. */
2895 private void catalogs(String theName,
2897 CheckContext theContext) {
2899 Map<Method, Object> handlers = catalogs.row(/*theName*/theContext.getPath(theName));
2900 if (handlers != null) {
2901 for (Map.Entry<Method, Object> handler : handlers.entrySet()) {
2903 handler.getKey().invoke(handler.getValue(), new Object[]{theTarget, theContext});
2904 } catch (Exception x) {
2905 errLogger.log(LogLevel.WARN, this.getClass().getName(), "Cataloging {} with {} failed {}", theName, handler.getKey(), x);
/* kwalify Validator subclass that (a) fires the Checker's pre/post validation
 * hooks for every rule and (b) transparently handles TOSCA "short form"
 * notations: when a scalar/map appears where the grammar expects a collection,
 * a canonical form is synthesized, re-validated, and remembered (keyed by
 * context path) so applyCanonicals can substitute it into the target later. */
2911 private class TOSCAValidator extends Validator {
2913 //what were validating
2914 private Target target;
2916 /* Some of the TOSCA entries accept a 'short form/notation' instead of the canonical map representation.
2917 * kwalify cannot easily express these alternatives and as such we handle them here. In the pre-validation phase we detect the presence of a short notation
2918 and compute the canonical form and validate it. In the post-validation phase we
2919 substitute the canonical form for the short form so that checking does not have to deal with it.
// canonical forms discovered during validation, keyed by validation-context path
2922 private Map<String, Object> canonicals = new TreeMap<>();
2924 TOSCAValidator(Target theTarget, Object theSchema) {
2926 this.target = theTarget;
2929 public Target getTarget() {
2933 /* hook method called by Validator#validate()
2936 protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) {
2938 validationHook("pre", value, rule, context);
2939 //short form handling
// the rule's 'short' hint names the key under which a scalar short form is wrapped
2940 String hint = rule.getShort();
2941 if (value != null &&
2944 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Attempting canonical at {}, rule {}", context.getPath(), rule.getName());
2946 Object canonical = null;
2947 //if the canonical form requires a collection
2948 if (Types.isCollectionType(rule.getType())) {
2949 //and the actual value isn't one
2950 if (!(value instanceof Map || value instanceof List)) {
2951 //used to use singleton map/list here (was good for catching errors)
2952 //but there is the possibility of short forms within short forms so
2953 //the created canonicals need to accommodate other values.
2954 if (Types.isMapType(rule.getType())) {
2955 canonical = new HashMap();
2956 ((Map) canonical).put(hint, value);
2958 //the hint is irrelevant here but we should impose a value when the target is a list
2959 canonical = new LinkedList();
2960 ((List) canonical).add(value);
2963 //we can accommodate:
2964 // map to list of map transformation
2965 if (!Types.isMapType(rule.getType()) /* a seq */ &&
2966 value instanceof Map) {
2967 canonical = new LinkedList();
2968 ((List) canonical).add(value);
2970 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType());
// re-validate the synthesized canonical; only keep it if no new errors appear
2975 int errc = context.errorCount();
2976 validateRule(canonical, rule, context);
2977 if (errc != context.errorCount()) {
2978 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} failed validation", rule.getName(), hint, context.getPath());
2980 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Short notation for {} through {} at {} passed validation. Canonical form is {}", rule.getName(), hint, context.getPath(), canonical);
2981 //replace the short notation with the canonical one so we don't
2982 //have to deal with it again during checking
2983 this.canonicals.put(context.getPath(), canonical);
2987 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Grammar for rule {} (at {}) would require unsupported short form transformation: {} to {}", rule.getName(), context.getPath(), value.getClass(), rule.getType());
2991 //perform default validation process
2996 * Only gets invoked once the value was successfully verified against the syntax indicated by the given rule.
2999 protected void postValidationHook(Object value,
3001 ValidationContext context) {
3002 validationHook("post", value, rule, context);
3008 * Maintains state across the checking process.
/* Maintains state across the checking process for one target: a stack of
 * element names (the current path, used in error reports and handler lookup)
 * and a parallel stack of the Construct each element belongs to (nullable). */
3010 public class CheckContext {
3012 private Target target;
// parallel stacks: elems[i] is the name, constructs[i] its (nullable) construct
3013 private ArrayList<String> elems = new ArrayList<>(10);
3014 private ArrayList<Construct> constructs = new ArrayList<>(10);
3016 CheckContext(Target theTarget) {
3017 this.target = theTarget;
3020 public CheckContext enter(String theName) {
3021 return enter(theName, null);
/* Pushes one path element (with its construct, possibly null) onto the context. */
3024 public CheckContext enter(String theName, Construct theConstruct) {
3025 this.elems.add(theName);
3026 this.constructs.add(theConstruct);
3027 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering check {} {}", theName, getPath());
/* Pops the innermost path element; must pair with a previous enter(). */
3031 public CheckContext exit() {
3032 String path = getPath();
3033 String name = this.elems.remove(this.elems.size() - 1);
3034 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "exiting check {} {}", name, path);
3035 this.constructs.remove(this.constructs.size() - 1);
3039 public String getPath() {
3040 return buildPath(null);
/* Path as it would be after entering theNextElem (used for handler lookup). */
3043 String getPath(String theNextElem) {
3044 return buildPath(theNextElem);
3047 String buildPath(String theElem) {
3048 StringBuilder sb = new StringBuilder();
3049 for (String e : this.elems) {
3053 if (theElem != null) {
// drop the trailing separator appended after the last element
3058 return sb.substring(0, sb.length() - 1);
/* Name of the innermost enclosing element of the given construct, or null-ish
 * fallback on elided lines; note the scan stops at index 1, skipping elems[0]. */
3061 public String enclosingConstruct(Construct theConstruct) {
3062 for (int i = this.constructs.size() - 1; i > 0; i--) {
3063 Construct c = this.constructs.get(i);
3064 if (c != null && c.equals(theConstruct)) {
3065 return this.elems.get(i);
/* Records an error on the target's report, tagged with the current path. */
3071 public CheckContext addError(String theMessage, Throwable theCause) {
3072 this.target.report(new TargetError("", getPath(), theMessage, theCause));
3076 public Checker checker() {
3077 return Checker.this;
3080 public Catalog catalog() {
3081 return Checker.this.catalog;
3084 public Target target() {
3088 public String toString() {
3089 return "CheckContext(" + this.target.getLocation() + "," + getPath() + ")";
3093 // -------------------------------------------------------------------------------------------------- //
/* Formats a list of collected errors into a human-readable multi-line report:
 * each entry is prefixed with its location ([path] for kwalify validation
 * errors, [location] for TargetErrors), followed by the message and cause. */
3095 private String errorReport(List<Throwable> theErrors) {
3096 StringBuilder sb = new StringBuilder(theErrors.size() + " errors");
3097 for (Throwable x : theErrors) {
3099 if (x instanceof ValidationException) {
3100 ValidationException vx = (ValidationException) x;
3102 // .append(error.getLineNumber())
3104 sb.append("[").append(vx.getPath()).append("] ");
3105 } else if (x instanceof TargetError) {
3106 TargetError tx = (TargetError) x;
3107 sb.append("[").append(tx.getLocation()).append("] ");
3109 sb.append(x.getMessage());
3110 if (x.getCause() != null) {
3111 sb.append("\n\tCaused by:\n").append(x.getCause());
3115 return sb.toString();
/* Post-validation hook (invoked reflectively by validationHook) for TOSCA
 * 'range' definitions: the value must be a 2-element sequence whose lower bound
 * is numeric and whose upper bound is numeric or the literal "UNBOUNDED". */
3118 protected void range_definition_post_validation_handler(Object theValue, Rule theRule,
3119 Validator.ValidationContext theContext) {
3120 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), getClass().getName(), "entering range_definition {}",
3121 theContext.getPath());
3123 assert theRule.getType().equals("seq");
3124 List bounds = (List) theValue;
3126 if (bounds.size() != 2) {
3127 theContext.addError("Too many values in bounds specification", theRule, theValue, null);
// parseDouble is used purely as a numeric-format probe; the value is discarded
3132 Double.parseDouble(bounds.get(0).toString());
3133 } catch (NumberFormatException nfe) {
3134 theContext.addError("Lower bound not a number", theRule, theValue, null);
3138 Double.parseDouble(bounds.get(1).toString());
3139 } catch (NumberFormatException nfe) {
3140 if (!"UNBOUNDED".equals(bounds.get(1).toString())) {
3141 theContext.addError("Upper bound not a number or 'UNBOUNDED'", theRule, theValue, null);
3148 * early processing (validation time) of the imports allows us to catalog
3149 * their types before those declared in the main document.
/* Post-validation hook for the 'imports' section. Imports are processed at
 * validation time (before the rest of the document is checked) so their types
 * get cataloged ahead of the importing document's own declarations: each import
 * is resolved via the locator, parsed and validated recursively, and its entry
 * is rewritten to the resolved location. */
3151 protected void imports_post_validation_handler(Object theValue, Rule theRule,
3152 Validator.ValidationContext theContext) {
3153 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering imports {}", theContext.getPath());
3154 assert theRule.getType().equals("seq");
3156 Target tgt = ((TOSCAValidator) theContext.getValidator()).getTarget();
// normalize any short-form import entries under /imports before reading them
3158 applyCanonicals(tgt.getTarget(), ((TOSCAValidator) theContext.getValidator()).canonicals, "/imports", true);
3160 for (ListIterator li = ((List) theValue).listIterator(); li.hasNext();) {
3162 Map.Entry importEntry = mapEntry(li.next());
3164 Map def = (Map) importEntry.getValue();
3165 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Processing import {}", def);
3167 String tfile = (String) def.get("file");
3168 Target tgti = this.locator.resolve(tfile);
3170 theContext.addError("Failure to resolve import '" + def + "', imported from " + tgt, theRule, null,
3174 debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Import {} located at {}", def,
3175 tgti.getLocation());
// addTarget returns true only for locations not seen before — avoids re-processing
3177 if (this.catalog.addTarget(tgti, tgt)) {
3178 // we've never seen this import (location) before
3181 List<Target> tgtis = parseTarget(tgti);
3182 if (tgtis.isEmpty())
3185 if (tgtis.size() > 1) {
3186 theContext.addError(
3187 "Import '" + tgti + "', imported from " + tgt + ", contains multiple yaml documents",
3188 theRule, null, null);
3192 tgti = tgtis.get(0);
3194 // tgti = parseTarget(tgti);
// NOTE(review): errors are read off tgt (the importer), not tgti — verify intended
3195 if (tgt.getReport().hasErrors()) {
3196 theContext.addError("Failure parsing import '" + tgti + "',imported from " + tgt, theRule, null,
3201 validateTarget(tgti);
3202 if (tgt.getReport().hasErrors()) {
3203 theContext.addError("Failure validating import '" + tgti + "',imported from " + tgt, theRule,
3207 } catch (CheckerException cx) {
3208 theContext.addError("Failure validating import '" + tgti + "',imported from " + tgt, theRule, cx,
// replace with the actual location (also because this is what the catalog
// indexes by .. bad, this exposes catalog inner workings)
3217 def.put("file", tgti.getLocation());
/**
 * Post-validation hook for 'node_templates': registers every node template
 * with the catalog, under the current validation target, so that the later
 * checking stage can cross-reference them by name. Cataloging failures
 * (e.g. duplicate template names) are reported as validation errors.
 */
protected void node_templates_post_validation_handler(Object theValue, Rule theRule,
        Validator.ValidationContext theContext) {
    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering node_templates_post_validation_handler {}",
            theContext.getPath());
    // the grammar declares 'node_templates' as a map
    assert theRule.getType().equals("map");
    Map<String, Map> nodeTemplates = (Map<String, Map>) theValue;
    for (Iterator<Map.Entry<String, Map>> i = nodeTemplates.entrySet().iterator(); i.hasNext();) {
        Map.Entry<String, Map> node = i.next();
        // index the template as a Node construct keyed by its name
        catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Node,
                node.getKey(), node.getValue());
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Node template {} has been cataloged",
    } catch (CatalogException cx) {
        theContext.addError(cx.toString(), theRule, node, null);
/**
 * Post-validation hook for topology template 'inputs': indexes every input
 * as a Data construct in the catalog so later value checks can resolve it.
 * A null 'inputs' value is tolerated here; the same test is repeated during
 * the checking stage.
 */
protected void inputs_post_validation_handler(Object theValue, Rule theRule,
        Validator.ValidationContext theContext) {
    debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "entering inputs_post_validation_handler {}",
            theContext.getPath());
    // the grammar declares 'inputs' as a map
    assert theRule.getType().equals("map");
    // we'll repeat this test during checking but because we index inputs
    // we need it here too
    if (theValue == null) {
    Map<String, Map> inputs = (Map<String, Map>) theValue;
    for (Iterator<Map.Entry<String, Map>> i = inputs.entrySet().iterator(); i.hasNext();) {
        Map.Entry<String, Map> input = i.next();
        // inputs are cataloged as Data constructs, keyed by input name
        catalog.addTemplate(((TOSCAValidator) theContext.getValidator()).getTarget(), Construct.Data,
                input.getKey(), input.getValue());
        debugLogger.log(LogLevel.DEBUG, this.getClass().getName(), "Input {} has been cataloged",
    } catch (CatalogException cx) {
        theContext.addError(cx.toString(), theRule, input, null);
/**
 * Instantiates a {@code Processor} from a whitespace-separated
 * specification: the first token is the fully qualified class name, any
 * remaining tokens are passed as String constructor arguments.
 *
 * @param theProcessorSpec "&lt;class-name&gt; [ctor-arg ...]"
 * @throws CheckerException if the class cannot be found or instantiated
 */
private void process(String theProcessorSpec) throws CheckerException {
    String[] spec = theProcessorSpec.split(" ");
    // NOTE(review): String.split never returns an empty array for a non-null
    // input, so this guard is effectively dead; a blank spec produces
    // spec[0] == "" and fails later in Class.forName instead
    if (spec.length == 0)
        throw new IllegalArgumentException("Incomplete processor specification");
    Class processorClass = null;
        processorClass = Class.forName(spec[0]);
    } catch (ClassNotFoundException cnfx) {
        throw new CheckerException("Cannot find processor implementation", cnfx);
    Processor proc = null;
        // pick whichever constructor matches the remaining String arguments
        proc = (Processor) ConstructorUtils.invokeConstructor(processorClass,
                Arrays.copyOfRange(spec, 1, spec.length));
    } catch (Exception x) {
        throw new CheckerException("Cannot instantiate processor", x);
/**
 * Checks one artifact definition within a template: common definition
 * checks plus verification that the declared artifact type is known.
 */
protected void check_artifact_definition(String theName, Map theDef, CheckContext theContext) {
    theContext.enter(theName, Construct.Artifact);
    // common sanity checks shared by all definitions; bail out on failure
    if (!checkDefinition(theName, theDef, theContext)) {
    // check artifact type
    if (!checkType(Construct.Artifact, theDef, theContext))
/**
 * Checks a policy type definition: common definition checks, the
 * 'properties' facet (including compatibility with inherited properties),
 * and that every 'targets' entry references a known node type or group
 * type.
 */
protected void check_policy_type_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName, Construct.Policy);
    if (!checkDefinition(theName, theDefinition, theContext)) {
    if (theDefinition.containsKey("properties")) {
        check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
        // verify the properties facet against the type hierarchy
        checkTypeConstructFacet(Construct.Policy, theName, theDefinition, Facet.properties, theContext);
    // the targets can be known node types or group types
    List<String> targets = (List<String>) theDefinition.get("targets");
    if (targets != null) {
        if (checkDefinition("targets", targets, theContext)) {
            for (String target : targets) {
                if (!(this.catalog.hasType(Construct.Node, target)
                        || this.catalog.hasType(Construct.Group, target))) {
                    theContext.addError(
                            "The 'targets' entry must contain a reference to a node type or group type, '"
                                    + target + "' is none of those",
/**
 * Checks a group type definition: common definition checks, the
 * 'properties' facet, 'targets' (which must reference known node types),
 * and any interface definitions.
 */
protected void check_group_type_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName, Construct.Group);
    if (!checkDefinition(theName, theDefinition, theContext)) {
    if (theDefinition.containsKey("properties")) {
        check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
        checkTypeConstructFacet(Construct.Group, theName, theDefinition, Facet.properties, theContext);
    if (theDefinition.containsKey("targets")) {
        // group targets must be node types known to the catalog
        checkTypeReference(Construct.Node, theContext,
                ((List<String>) theDefinition.get("targets")).toArray(EMPTY_STRING_ARRAY));
    Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces");
    if (interfaces != null) {
        theContext.enter("interfaces");
        for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) {
            Map.Entry<String, Map> e = i.next();
            check_type_interface_definition(e.getKey(), e.getValue(), theContext);
/**
 * Checks a node type definition: common definition checks plus its
 * 'properties', 'attributes', 'requirements', 'capabilities' and
 * 'interfaces' facets.
 */
protected void check_node_type_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName, Construct.Node);
    if (!checkDefinition(theName, theDefinition, theContext)) {
    if (theDefinition.containsKey("properties")) {
        check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
        checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.properties, theContext);
    if (theDefinition.containsKey("attributes")) {
        // NOTE(review): attributes are run through check_properties here,
        // while check_capability_type_definition uses check_attributes --
        // confirm this asymmetry is intentional
        check_properties((Map<String, Map>) theDefinition.get("attributes"), theContext);
        checkTypeConstructFacet(Construct.Node, theName, theDefinition, Facet.attributes, theContext);
    if (theDefinition.containsKey("requirements")) {
        check_requirements((List<Map>) theDefinition.get("requirements"), theContext);
    if (theDefinition.containsKey("capabilities")) {
        check_capabilities((Map<String, Map>) theDefinition.get("capabilities"), theContext);
    Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces");
    if (interfaces != null) {
        theContext.enter("interfaces");
        for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) {
            Map.Entry<String, Map> e = i.next();
            check_type_interface_definition(e.getKey(), e.getValue(), theContext);
/**
 * Checks an interface type definition. Beyond the common definition checks
 * nothing else is verified at this level: the body is a list of operation
 * definitions (implementation plus inputs), and compatible redefinition of
 * common inputs / operation overrides is still a TODO (see comments below).
 */
protected void check_interface_type_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName, Construct.Interface);
    if (!checkDefinition(theName, theDefinition, theContext)) {
    // not much else here: a list of operation_definitions, each with
    // implementation and inputs
    // TODO: check that common inputs are re-defined in a compatible manner
    // TODO: check that the interface operations are overwritten in a
    // compatible manner
    // for (Iterator<Map.Entry<String,Map>> i = theDefinition.entrySet()
/**
 * Checks an artifact type definition: only the common definition checks
 * apply at this level.
 */
protected void check_artifact_type_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName, Construct.Artifact);
    if (!checkDefinition(theName, theDefinition, theContext)) {
/**
 * Checks a relationship type definition: common definition checks, the
 * 'properties' and 'attributes' facets, interface definitions, and
 * 'valid_target_types' (which must reference known capability types).
 */
protected void check_relationship_type_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName, Construct.Relationship);
    if (!checkDefinition(theName, theDefinition, theContext)) {
    if (theDefinition.containsKey("properties")) {
        check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
        checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.properties, theContext);
    if (theDefinition.containsKey("attributes")) {
        // NOTE(review): attributes validated via check_properties (cf.
        // check_attributes used for capability types) -- confirm intentional
        check_properties((Map<String, Map>) theDefinition.get("attributes"), theContext);
        checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, Facet.attributes, theContext);
    Map<String, Map> interfaces = (Map<String, Map>) theDefinition.get("interfaces");
    if (interfaces != null) {
        theContext.enter("interfaces");
        for (Iterator<Map.Entry<String, Map>> i = interfaces.entrySet().iterator(); i.hasNext();) {
            Map.Entry<String, Map> e = i.next();
            check_type_interface_definition(e.getKey(), e.getValue(), theContext);
    if (theDefinition.containsKey("valid_target_types")) {
        // relationship targets are capability types
        checkTypeReference(Construct.Capability, theContext,
                ((List<String>) theDefinition.get("valid_target_types")).toArray(EMPTY_STRING_ARRAY));
/**
 * Checks a capability type definition: common definition checks, the
 * 'properties' and 'attributes' facets, and 'valid_source_types' (which
 * must reference known node types).
 */
protected void check_capability_type_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName, Construct.Capability);
    if (!checkDefinition(theName, theDefinition, theContext)) {
    if (theDefinition.containsKey("properties")) {
        check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
        checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.properties, theContext);
    if (theDefinition.containsKey("attributes")) {
        check_attributes((Map<String, Map>) theDefinition.get("attributes"), theContext);
        checkTypeConstructFacet(Construct.Capability, theName, theDefinition, Facet.attributes, theContext);
    // valid_source_types: see capability_type_definition
    // NOTE(review): the reference above is self-referential; presumably it
    // was meant to point at the node type definition -- verify
    // unclear: how is the valid_source_types list definition evolving across
    // the type hierarchy: additive, overwriting, ??
    if (theDefinition.containsKey("valid_source_types")) {
        checkTypeReference(Construct.Node, theContext,
                ((List<String>) theDefinition.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY));
/**
 * Checks a data type definition: common definition checks plus its
 * 'properties' facet.
 */
protected void check_data_type_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName, Construct.Data);
    if (!checkDefinition(theName, theDefinition, theContext)) {
    if (theDefinition.containsKey("properties")) {
        check_properties((Map<String, Map>) theDefinition.get("properties"), theContext);
        checkTypeConstructFacet(Construct.Data, theName, theDefinition, Facet.properties, theContext);
* top level rule, we collected the whole information set. This is where
/**
 * Entry point run once the top-level rule has been validated and the whole
 * information set is available. Performs two passes over the top-level
 * entries: first 'catalogs' (indexing everything so checks can
 * cross-reference), then 'checks' (the actual consistency checking).
 */
protected void check_service_template_definition(Map<String, Object> theDef, CheckContext theContext) {
    theContext.enter("");
    if (theDef == null) {
        theContext.addError("Empty template", null);
    // !!! imports need to be processed first now that cataloging takes
    // place at check time!!
    // first catalog whatever it is there to be cataloged so that the checks
    // can perform cross-checking
    for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); ri.hasNext();) {
        Map.Entry<String, Object> e = ri.next();
        catalogs(e.getKey(), e.getValue(), theContext);
    // second pass: the actual checks, now able to resolve references
    for (Iterator<Map.Entry<String, Object>> ri = theDef.entrySet().iterator(); ri.hasNext();) {
        Map.Entry<String, Object> e = ri.next();
        checks(e.getKey(), e.getValue(), theContext);
/**
 * Checks a single attribute definition: common definition checks plus that
 * its declared data type is known.
 */
protected void check_attribute_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName);
    if (!checkDefinition(theName, theDefinition, theContext)) {
    if (!checkDataType(theDefinition, theContext)) {
/**
 * Checks a map of attribute definitions, delegating each entry to
 * check_attribute_definition.
 */
public void check_attributes(Map<String, Map> theDefinitions, CheckContext theContext) {
    theContext.enter("attributes");
    if (!checkDefinition("attributes", theDefinitions, theContext))
    for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
        Map.Entry<String, Map> e = i.next();
        check_attribute_definition(e.getKey(), e.getValue(), theContext);
/**
 * Checks a single property definition: common definition checks, that the
 * declared data type is known, and that any 'default' value conforms to
 * that type.
 */
protected void check_property_definition(String theName, Map theDefinition, CheckContext theContext) {
    theContext.enter(theName);
    if (!checkDefinition(theName, theDefinition, theContext)) {
    if (!checkDataType(theDefinition, theContext)) {
    // check default value is compatible with type
    Object defaultValue = theDefinition.get("default");
    if (defaultValue != null) {
        checkDataValuation(defaultValue, theDefinition, theContext);
3627 public void check_properties(Map<String, Map> theDefinitions, CheckContext theContext) {
3628 theContext.enter("properties");
3630 if (!checkDefinition("properties", theDefinitions, theContext))
3633 for (Iterator<Map.Entry<String, Map>> i = theDefinitions.entrySet().iterator(); i.hasNext();) {
3634 Map.Entry<String, Map> e = i.next();
3635 check_property_definition(e.getKey(), e.getValue(), theContext);