2 * ============LICENSE_START=======================================================
3 * Copyright (C) 2018 Ericsson. All rights reserved.
4 * ================================================================================
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
9 * http://www.apache.org/licenses/LICENSE-2.0
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
17 * SPDX-License-Identifier: Apache-2.0
18 * ============LICENSE_END=========================================================
21 package org.onap.policy.distribution.reception.decoding.pdpx;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import java.io.FileWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.onap.policy.common.logging.flexlogger.FlexLogger;
import org.onap.policy.common.logging.flexlogger.Logger;
import org.onap.policy.distribution.reception.decoding.PolicyDecodingException;
import org.onap.sdc.tosca.parser.api.ISdcCsarHelper;
import org.onap.sdc.toscaparser.api.CapabilityAssignment;
import org.onap.sdc.toscaparser.api.CapabilityAssignments;
import org.onap.sdc.toscaparser.api.NodeTemplate;
 * Extracts relevant information from a NodeTemplate; currently ONLY the HPA feature is supported.
44 * @author Libo Zhu (libo.zhu@intel.com)
public class ExtractFromNode {

    private static final Logger LOGGER = FlexLogger.getLogger(ExtractFromNode.class);
    // TOSCA node type that identifies a VDU (virtual compute) node template.
    private static final String VDU_TYPE = "tosca.nodes.nfv.Vdu.Compute";
    // TOSCA node type that identifies a VDU connection-point node template.
    private static final String VDU_CP_TYPE = "tosca.nodes.nfv.VduCp";
    // "#"-separated property paths used to read capability leaf values from the CSAR helper.
    private static final String VIRTUAL_MEM_SIZE_PATH = "virtual_memory#virtual_mem_size";
    private static final String NUM_VIRTUAL_CPU_PATH = "virtual_cpu#num_virtual_cpu";
    private static final String CPU_ARCHITECTURE_PATH = "virtual_cpu#cpu_architecture";
    // HPA feature name used for basic CPU/memory capabilities.
    private static final String BASIC_CAPABILITIES = "BasicCapabilities";

    // Helper for navigating the SDC CSAR; injected via setSdcCsarHelper().
    // NOTE(review): package-private — presumably for test access; confirm before narrowing to private.
    ISdcCsarHelper sdcCsarHelper;
    // Serializes generated policies to pretty-printed JSON; serializeNulls keeps empty fields visible.
    final Gson gson = new GsonBuilder().serializeNulls().setPrettyPrinting().disableHtmlEscaping().create();
59 public void setSdcCsarHelper(final ISdcCsarHelper sdcCsarHelper) {
60 this.sdcCsarHelper = sdcCsarHelper;
64 * ExtractInfo from VNF , each VNF may includes more than one VDUs and CPs return new generated PdpxPolicy if it has
65 * got Hpa feature info or else return null.
67 * @param node the NodeTemplate
68 * @return the extracted info from input node
69 * @throws PolicyDecodingException if extract fails
71 public PdpxPolicy extractInfo(final NodeTemplate node) throws PolicyDecodingException {
73 LOGGER.debug("the meta data of this nodetemplate = " + sdcCsarHelper.getNodeTemplateMetadata(node));
74 final List<NodeTemplate> lnodeChild = sdcCsarHelper.getNodeTemplateChildren(node);
75 LOGGER.debug("the size of lnodeChild = " + lnodeChild.size());
77 // Store all the VDUs under one VNF
78 final List<NodeTemplate> lnodeVdu = new ArrayList<>();
79 // Store all the Cps under one VNF
80 final List<NodeTemplate> lnodeVduCp = new ArrayList<>();
81 for (final NodeTemplate nodeChild : lnodeChild) {
82 final String type = sdcCsarHelper.getTypeOfNodeTemplate(nodeChild);
83 LOGGER.debug("the type of this nodeChild = " + type);
84 LOGGER.debug("the meta data of this nodetemplate = " + sdcCsarHelper.getNodeTemplateMetadata(nodeChild));
85 if (type.equalsIgnoreCase(VDU_TYPE)) {
86 lnodeVdu.add(nodeChild);
87 } else if (type.equalsIgnoreCase(VDU_CP_TYPE)) {
88 lnodeVduCp.add(nodeChild);
91 LOGGER.debug("the size of vdu is =" + lnodeVdu.size());
92 LOGGER.debug("the size of cp is =" + lnodeVduCp.size());
94 final PdpxPolicy pdpxPolicy = new PdpxPolicy();
95 final Content content = pdpxPolicy.getContent();
96 extractInfoVdu(lnodeVdu, content);
97 extractInfoVduCp(lnodeVduCp, content);
98 if (content.getFlavorFeatures().isEmpty()) {
101 String outputFile = sdcCsarHelper.getNodeTemplateMetadata(node).getValue("name");
102 outputFile += ".json";
103 LOGGER.debug("outputFile = " + outputFile);
104 try (Writer writer = new FileWriter(outputFile)) {
105 gson.toJson(pdpxPolicy, writer);
106 } catch (final Exception exp) {
107 final String message = "Failed writing generated policies to file";
108 LOGGER.error(message, exp);
109 throw new PolicyDecodingException(message, exp);
116 * ExtractInfofromVdu, supported hpa features, All under the capability of tosca.nodes.nfv.Vdu.Compute.
118 * @param lnodeVdu the list of Vdu node
119 * @param content to be change based on lnodeVdu
121 public void extractInfoVdu(final List<NodeTemplate> lnodeVdu, final Content content) {
122 // each VDU <=> FlavorFeature
123 for (final NodeTemplate node : lnodeVdu) {
124 final Attribute flavorAttribute = new Attribute();
125 flavorAttribute.setAttributeName("flavorName");
126 flavorAttribute.setAttributeValue("");
127 final Directive flavorDirective = new Directive();
128 flavorDirective.setType("flavor_directive");
129 flavorDirective.getAttributes().add(flavorAttribute);
130 final FlavorFeature flavorFeature = new FlavorFeature();
131 flavorFeature.setId(sdcCsarHelper.getNodeTemplatePropertyLeafValue(node, "name"));
132 flavorFeature.getDirectives().add(flavorDirective);
134 final CapabilityAssignments capabilityAssignments = sdcCsarHelper.getCapabilitiesOf(node);
135 final CapabilityAssignment capabilityAssignment =
136 capabilityAssignments.getCapabilityByName("virtual_compute");
137 if (capabilityAssignment != null) {
138 generateBasicCapability(capabilityAssignment, flavorFeature);
139 generateHugePages(capabilityAssignment);
141 content.getFlavorFeatures().add(flavorFeature);
146 * GenerateBasicCapability, supported hpa features, All under the capability of tosca.nodes.nfv.Vdu.Compute.
148 * @param capabilityAssignment represents the capability of node
149 * @param flavorFeature represents all the features of specified flavor
151 private void generateBasicCapability(final CapabilityAssignment capabilityAssignment,
152 final FlavorFeature flavorFeature) {
153 // the format is xxx MB/GB like 4096 MB
154 final String virtualMemSize =
155 sdcCsarHelper.getCapabilityPropertyLeafValue(capabilityAssignment, VIRTUAL_MEM_SIZE_PATH);
156 if (virtualMemSize != null) {
157 LOGGER.debug("the virtualMemSize = " + virtualMemSize);
158 final HpaFeatureAttribute hpaFeatureAttribute =
159 generateHpaFeatureAttribute("virtualMemSize", virtualMemSize);
160 final FlavorProperty flavorProperty = new FlavorProperty();
161 flavorProperty.setHpaFeature(BASIC_CAPABILITIES);
162 flavorProperty.getHpaFeatureAttributes().add(hpaFeatureAttribute);
163 flavorFeature.getFlavorProperties().add(flavorProperty);
166 // the format is int like 2
167 final String numVirtualCpu =
168 sdcCsarHelper.getCapabilityPropertyLeafValue(capabilityAssignment, NUM_VIRTUAL_CPU_PATH);
169 if (numVirtualCpu != null) {
170 LOGGER.debug("the numVirtualCpu = " + numVirtualCpu);
171 final HpaFeatureAttribute hpaFeatureAttribute = generateHpaFeatureAttribute("numVirtualCpu", numVirtualCpu);
172 final String cpuArchitecture =
173 sdcCsarHelper.getCapabilityPropertyLeafValue(capabilityAssignment, CPU_ARCHITECTURE_PATH);
174 final FlavorProperty flavorProperty = new FlavorProperty();
175 flavorProperty.setHpaFeature(BASIC_CAPABILITIES);
176 if (cpuArchitecture != null) {
177 flavorProperty.setArchitecture(cpuArchitecture);
179 flavorProperty.getHpaFeatureAttributes().add(hpaFeatureAttribute);
180 flavorFeature.getFlavorProperties().add(flavorProperty);
185 * GenerateHpaFeatureAttribute based on the value of featureValue. the format: "hpa-attribute-key": "pciVendorId",
186 * "hpa-attribute-value": "1234", "operator": "=", "unit": "xxx".
188 * @param hpaAttributeKey get from the high layer tosca DM
189 * @param featureValue get from the high layer tosca DM
191 private HpaFeatureAttribute generateHpaFeatureAttribute(final String hpaAttributeKey, final String featureValue) {
193 final HpaFeatureAttribute hpaFeatureAttribute = new HpaFeatureAttribute();
194 hpaFeatureAttribute.setHpaAttributeKey(hpaAttributeKey);
195 final String tmp = featureValue.replace(" ", "");
196 final String pattern = "(\\D*)(\\d+)(\\D*)";
197 final Pattern r = Pattern.compile(pattern);
198 final Matcher m = r.matcher(tmp);
200 LOGGER.debug("operator = " + m.group(1));
201 LOGGER.debug("value = " + m.group(2));
202 LOGGER.debug("unit = " + m.group(3));
203 hpaFeatureAttribute.setOperator(m.group(1));
204 hpaFeatureAttribute.setHpaAttributeValue(m.group(2));
205 hpaFeatureAttribute.setUnit(m.group(3));
207 return hpaFeatureAttribute;
211 * GenerateHugePages, supported hpa features, All under the capability of tosca.nodes.nfv.Vdu.Compute. The format is
212 * a map like: {"schema-version": "0", "schema-location": "", "platform-id": "generic", "mandatory": true,
213 * "configuration-value": "2 MB"}
215 * @param capabilityAssignment represents the capability of node
216 * @param flavorFeature represents all the features of specified flavor
218 private void generateHugePages(final CapabilityAssignment capabilityAssignment) {
219 // add HugePages support
223 * ExtractInfoVduCp, supported hpa features, under the virtual_network_interface_requirements of
224 * tosca.nodes.nfv.VduCp.
226 * @param lnodeVduCp the list of VduCp node
227 * @param content to be change based on lnodeVduCp
229 public void extractInfoVduCp(final List<NodeTemplate> lnodeVduCp, final Content content) {
230 // to add VDU cp Hpa feature extract