# ============LICENSE_START====================================================
# =============================================================================
# Copyright (c) 2019-2020 AT&T Intellectual Property. All rights reserved.
# Copyright (c) 2021 highstreet technologies GmbH. All rights reserved.
# =============================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#      http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END======================================================
import json
from subprocess import PIPE, Popen

from jsonschema import validate

from aoconversion import utils, exceptions
def _protobuf_to_js(proto_path):
    """
    Convert a protobuf to jsonschema and return the generated schema as a JSON object.

    Runs the external ``protobuf-jsonschema`` tool against *proto_path*,
    strips the random Acumos-generated package prefix from every definition
    name, and validates the result against the draft-4 JSON-schema
    meta-schema before returning it.

    :param proto_path: filesystem path to the model ``.proto`` file
    :return: the generated schema as a parsed JSON object (dict)
    :raises jsonschema.exceptions.ValidationError: if the generated schema is
        not itself valid draft-4 jsonschema
    """
    cmd = ["protobuf-jsonschema", proto_path]
    p = Popen(cmd, stderr=PIPE, stdout=PIPE)
    # capture stdout (the generated schema); communicate() also waits for exit
    out, _ = p.communicate()
    asjson = json.loads(out)

    # change the definition names to remove the random package name that acumos generates
    defs = asjson["definitions"]
    defns = list(defs.keys())
    for defn in defns:
        # https://stackoverflow.com/questions/16475384/rename-a-dictionary-key
        defs[defn.split(".")[1]] = defs.pop(defn)

    # make sure what we got out is a valid jsonschema
    draft4 = utils.schema_schema.get()
    validate(instance=asjson, schema=draft4)

    return asjson
def _get_needed_formats(meta):
    """
    Read the metadata and figure out what the principal data formats are.

    We cannot determine this from the proto because the proto may list
    "submessages" in a flat namespace; some of them may not correspond to a
    data format but rather a definition referenced from another one. We don't
    want to generate a data format for submessages; instead they should be
    included in ``definitions`` as part of the relevant data format.

    :param meta: parsed model metadata (dict) containing a "methods" mapping
    :return: list of unique format names used as method inputs and outputs
    """
    # we use a dict because multiple methods may reuse names; dict keys
    # deduplicate while preserving first-seen order
    needed_formats = {}
    for method in meta["methods"]:
        needed_formats[utils.validate_format(meta, method, "input")] = 1
        needed_formats[utils.validate_format(meta, method, "output")] = 1
    return list(needed_formats.keys())
def _generate_dcae_data_formats(proto_path, meta, dcae_df_schema, draft_4_schema):
    """
    Generate a collection of DCAE data formats from the model ``.proto``.

    This helper function is broken out for ease of unit testing; it can be
    unit tested easily because all deps are parameters, whereas
    ``generate_dcae_data_formats`` requires some mocking etc.

    :param proto_path: path to the model ``.proto`` file
    :param meta: parsed model metadata (dict)
    :param dcae_df_schema: the DCAE data-format JSON schema to validate against
    :param draft_4_schema: the draft-4 JSON-schema meta-schema
    :return: list of DCAE data-format dicts, one per needed format
    :raises exceptions.UnsupportedFormatScenario: when the proto contains a
        reference or definition layout this tool does not support
    """
    js = _protobuf_to_js(proto_path)
    needed_formats = _get_needed_formats(meta)

    data_formats = []
    used_defns = []  # definitions consumed as sub-schemas of some data format

    # iterate over and convert
    for nf in needed_formats:
        defn = js["definitions"][nf]
        definitions = {}  # sub-schemas referenced by this particular format

        # check for the case where we have an array of other defns
        for prop in defn["properties"]:
            if defn["properties"][prop]["type"] == "array" and "$ref" in defn["properties"][prop]["items"]:
                unclean_ref_name = defn["properties"][prop]["items"]["$ref"]
                clean_ref_name = unclean_ref_name.split(".")[1]
                if clean_ref_name in js["definitions"]:
                    defn["properties"][prop]["items"]["$ref"] = "#/definitions/{0}".format(clean_ref_name)
                    definitions[clean_ref_name] = js["definitions"][clean_ref_name]
                    used_defns.append(clean_ref_name)
                else:  # this is bad/unsupported, investigate
                    raise exceptions.UnsupportedFormatScenario()

        # the defns created by this tool do not include a schema field.
        # I created an issue: https://github.com/devongovett/protobuf-jsonschema/issues/12
        defn["$schema"] = "http://json-schema.org/draft-04/schema#"

        # Include the definitions, which may be empty {}
        defn["definitions"] = definitions

        # Validate that our resulting jsonschema is valid jsonschema
        validate(instance=defn, schema=draft_4_schema)

        # we currently hardcode dataformatversion, since it is the latest and has been for years https://gerrit.onap.org/r/gitweb?p=dcaegen2/platform/cli.git;a=blob_plain;f=component-json-schemas/data-format/dcae-cli-v1/data-format-schema.json;hb=HEAD
        dcae_df = {"self": {"name": nf, "version": "1.0.0"}, "dataformatversion": "1.0.1", "jsonschema": defn}

        # make sure the schema validates against the DCAE data format schema
        validate(instance=dcae_df, schema=dcae_df_schema)

        # if we've passed the validation and exc raising so far, we are good, append this to output list of dcae data formats
        data_formats.append(dcae_df)

    # make sure every definition we got out was used. Otherwise, this requires investigation!!
    if sorted(needed_formats + used_defns) != sorted(list(js["definitions"].keys())):
        raise exceptions.UnsupportedFormatScenario()

    return data_formats
def generate_dcae_data_formats(model_repo_path, model_name):
    """
    Generate a collection of DCAE data formats from the model ``.proto``.

    Returns them as the return of this call so the result can be fed directly
    into spec generation. Also writes each data format to disk as
    ``<name>_<version>_dcae_data_format.json`` under *model_repo_path*, since
    the dcae cli seems to want a file per format.

    :param model_repo_path: root directory holding the downloaded model
    :param model_name: name of the model subdirectory containing model.proto
    :return: list of DCAE data-format dicts
    """
    data_formats = _generate_dcae_data_formats(
        "{0}/{1}/model.proto".format(model_repo_path, model_name),
        utils.get_metadata(model_repo_path, model_name),
        utils.dataformat_schema.get(),
        utils.schema_schema.get(),
    )

    # now we iterate over these and write a file to disk for each, since the dcae cli seems to want that
    for df in data_formats:
        # name_version seems like a reasonable filename
        fname = "{0}_{1}_dcae_data_format.json".format(df["self"]["name"], df["self"]["version"])
        with open("{0}/{1}".format(model_repo_path, fname), "w") as f:
            f.write(json.dumps(df))

    return data_formats