1 # ============LICENSE_START====================================================
3 # =============================================================================
4 # Copyright (c) 2019-2020 AT&T Intellectual Property. All rights reserved.
5 # =============================================================================
6 # Licensed under the Apache License, Version 2.0 (the "License");
7 # you may not use this file except in compliance with the License.
8 # You may obtain a copy of the License at
10 # http://www.apache.org/licenses/LICENSE-2.0
12 # Unless required by applicable law or agreed to in writing, software
13 # distributed under the License is distributed on an "AS IS" BASIS,
14 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 # See the License for the specific language governing permissions and
16 # limitations under the License.
17 # ============LICENSE_END======================================================
import json
from subprocess import PIPE, Popen

from jsonschema import validate

from aoconversion import utils, exceptions
def _protobuf_to_js(proto_path):
    """
    Converts a protobuf to jsonschema and returns the generated schema as a JSON object.

    :param proto_path: path to the model .proto file
    :return: dict, the generated jsonschema (validated against draft-4)
    :raises jsonschema.ValidationError: if the generated schema is not valid jsonschema
    """
    # list-form argv (shell=False) so proto_path cannot be shell-injected
    cmd = ["protobuf-jsonschema", proto_path]
    p = Popen(cmd, stderr=PIPE, stdout=PIPE)
    out, _ = p.communicate()
    asjson = json.loads(out)

    # change the definition names to remove the random package name that acumos generates
    # (names come out as "<package>.<Name>"; keep only "<Name>")
    defs = asjson["definitions"]
    for defn in list(defs.keys()):
        # https://stackoverflow.com/questions/16475384/rename-a-dictionary-key
        defs[defn.split(".")[1]] = defs.pop(defn)

    # make sure what we got out is a valid jsonschema
    draft4 = utils.schema_schema.get()
    validate(instance=asjson, schema=draft4)

    return asjson
48 def _get_needed_formats(meta):
50 Read the metadata and figure out what the principle data formats are.
51 We cannot determine this from the proto because the proto may list "submessages" in a flat namespace; some of them may not coorespond to a data format but rather a referenced defintion in another.
52 We don't want to generate a data format for submessages though; instead they should be included in definitions as part of the relevent data format
54 # we use a dict because multiple methods may reuse names
56 for method in meta["methods"]:
57 needed_formats[meta["methods"][method]["input"]] = 1
58 needed_formats[meta["methods"][method]["output"]] = 1
59 return list(needed_formats.keys())
def _generate_dcae_data_formats(proto_path, meta, dcae_df_schema, draft_4_schema):
    """
    Generates a collection of data formats from the model .proto
    This helper function is broken out for the ease of unit testing; this can be unit tested
    easily because all deps are parameters, but generate_dcae_data_formats requires some
    mocking etc.

    :param proto_path: path to the model .proto file
    :param meta: model metadata dict (see _get_needed_formats)
    :param dcae_df_schema: the DCAE data-format schema to validate each result against
    :param draft_4_schema: jsonschema draft-4 meta-schema
    :return: list of DCAE data-format dicts, one per needed format
    :raises exceptions.UnsupportedFormatScenario: on an unresolvable $ref or unused definition
    """
    js = _protobuf_to_js(proto_path)
    needed_formats = _get_needed_formats(meta)

    data_formats = []  # accumulated DCAE data-format dicts (the return value)
    used_defns = []    # definition names consumed as $ref targets, for the coverage check below

    # iterate over and convert
    for nf in needed_formats:
        defn = js["definitions"][nf]
        definitions = {}  # sub-definitions referenced by this particular format

        # check for the case where we have an array of other defns
        for prop in defn["properties"]:
            if defn["properties"][prop]["type"] == "array" and "$ref" in defn["properties"][prop]["items"]:
                unclean_ref_name = defn["properties"][prop]["items"]["$ref"]
                # refs come out as "<package>.<Name>"; keep only "<Name>"
                clean_ref_name = unclean_ref_name.split(".")[1]
                if clean_ref_name in js["definitions"]:
                    # rewrite the ref to point into this format's own definitions section
                    defn["properties"][prop]["items"]["$ref"] = "#/definitions/{0}".format(clean_ref_name)
                    definitions[clean_ref_name] = js["definitions"][clean_ref_name]
                    used_defns.append(clean_ref_name)
                else:  # this is bad/unsupported, investigate
                    raise exceptions.UnsupportedFormatScenario()

        # the defns created by this tool do not include a schema field.
        # I created an issue: https://github.com/devongovett/protobuf-jsonschema/issues/12
        defn["$schema"] = "http://json-schema.org/draft-04/schema#"

        # Include the definitions, which may be empty {}
        defn["definitions"] = definitions

        # Validate that our resulting jsonschema is valid jsonschema
        validate(instance=defn, schema=draft_4_schema)

        # we currently hardcode dataformatversion, since it is the latest and has been for years
        # https://gerrit.onap.org/r/gitweb?p=dcaegen2/platform/cli.git;a=blob_plain;f=component-json-schemas/data-format/dcae-cli-v1/data-format-schema.json;hb=HEAD
        dcae_df = {"self": {"name": nf, "version": "1.0.0"}, "dataformatversion": "1.0.1", "jsonschema": defn}

        # make sure the schema validates against the DCAE data format schema
        validate(instance=dcae_df, schema=dcae_df_schema)

        # if we've passed the validation and exc raising so far, we are good,
        # append this to output list of dcae data formats
        data_formats.append(dcae_df)

    # make sure every definition we got out was used. Otherwise, this requires investigation!!
    if sorted(needed_formats + used_defns) != sorted(list(js["definitions"].keys())):
        raise exceptions.UnsupportedFormatScenario()

    return data_formats
def generate_dcae_data_formats(model_repo_path, model_name):
    """
    Generates a collection of data formats from the model .proto
    Writes one JSON file per data format into model_repo_path (the dcae cli seems to want that).
    Returns them as the return of this call so this can be fed directly into spec gen.

    :param model_repo_path: directory containing the unpacked model
    :param model_name: subdirectory name of the model (contains model.proto)
    :return: list of DCAE data-format dicts
    """
    data_formats = _generate_dcae_data_formats(
        "{0}/{1}/model.proto".format(model_repo_path, model_name),
        utils.get_metadata(model_repo_path, model_name),
        utils.dataformat_schema.get(),
        utils.schema_schema.get(),
    )

    # now we iterate over these and write a file to disk for each, since the dcae cli seems to want that
    for df in data_formats:
        # name_version seems like a reasonable filename
        fname = "{0}_{1}_dcae_data_format.json".format(df["self"]["name"], df["self"]["version"])
        with open("{0}/{1}".format(model_repo_path, fname), "w") as f:
            f.write(json.dumps(df))

    return data_formats