heatClient = getHeatClient(cloudSiteId, tenantId);
} catch (MsoTenantNotFound e) {
// Tenant doesn't exist, so stack doesn't either
- logger.debug("Tenant with id " + tenantId + "not found.", e);
+ logger.debug("Tenant with id {} not found.", tenantId, e);
return new StackInfo(stackName, HeatStatus.NOTFOUND);
} catch (MsoException me) {
// Got an Openstack error. Propagate it
updateStack = queryHeatStack(heatClient, canonicalName);
logger.debug("{} ({}) ", updateStack.getStackStatus(), canonicalName);
try {
- logger.debug(
- "Current stack {}" + this.getOutputsAsStringBuilderWithUpdate(heatStack).toString());
+ logger.debug("Current stack {}",
+ this.getOutputsAsStringBuilderWithUpdate(heatStack).toString());
} catch (Exception e) {
logger.debug("an error occurred trying to print out the current outputs of the stack", e);
}
} else {
try {
logger.debug(
- "Create Stack error - unable to query for stack status - attempting to delete stack: "
- + instanceId
- + " - This will likely fail and/or we won't be able to query to see if delete worked");
+ "Create Stack error - unable to query for stack status - attempting to delete stack: {} - This will likely fail and/or we won't be able to query to see if delete worked",
+ instanceId);
StackInfo deleteInfo = deleteStack(cloudSiteId, cloudOwner, tenantId, instanceId);
// this may be a waste of time - if we just got an exception trying to query the stack -
// we'll just
"Create Stack error, stack deletion suppressed"));
} else {
try {
- logger.debug("Create Stack errored - attempting to DELETE stack: " + instanceId);
- logger.debug("deletePollInterval=" + deletePollInterval + ", deletePollTimeout="
- + deletePollTimeout);
+ logger.debug("Create Stack errored - attempting to DELETE stack: {}", instanceId);
+ logger.debug("deletePollInterval={}, deletePollTimeout={}", deletePollInterval,
+ deletePollTimeout);
StackInfo deleteInfo = deleteStack(cloudSiteId, cloudOwner, tenantId, instanceId);
boolean deleted = false;
while (!deleted) {
"Create Stack errored, stack deletion FAILED", "", "",
ErrorCode.BusinessProcessError.getValue(),
"Create Stack error, stack deletion FAILED"));
- logger.debug("Stack deletion FAILED on a rollback of a create - " + instanceId
- + ", status=" + queryInfo.getStatus() + ", reason="
- + queryInfo.getStatusMessage());
+ logger.debug(
+ "Stack deletion FAILED on a rollback of a create - {}, status={}, reason={}",
+ instanceId, queryInfo.getStatus(), queryInfo.getStatusMessage());
break;
}
} catch (MsoException me2) {
// Just log this one. We will report the original exception.
- logger.debug("Exception thrown trying to delete " + instanceId
- + " on a create->rollback: " + me2.getContextMessage(), me2);
+ logger.debug("Exception thrown trying to delete {} on a create->rollback: {}",
+ instanceId, me2.getContextMessage(), me2);
logger.warn(String.format("%s %s %s %s %d %s",
MessageEnum.RA_CREATE_STACK_ERR.toString(),
"Create Stack errored, then stack deletion FAILED - exception thrown", "", "",
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* If a network already exists with the same name, an exception will be thrown. Note that this is an MSO-imposed
* restriction. Openstack does not require uniqueness on network names.
* <p>
- *
+ *
* @param cloudSiteId The cloud identifier (may be a region) in which to create the network.
* @param tenantId The tenant in which to create the network
* @param type The type of network to create (Basic, Provider, Multi-Provider)
* Whenever possible, the network ID should be used as it is much more efficient. Query by name requires retrieval
* of all networks for the tenant and search for matching name.
* <p>
- *
+ *
* @param networkNameOrId The network to query
* @param tenantId The Openstack tenant to look in for the network
* @param cloudSiteId The cloud identifier (may be a region) in which to query the network.
public Optional<Port> getNeutronPort(String neutronPortId, String tenantId, String cloudSiteId) {
try {
- logger.debug("Finding Neutron port:" + neutronPortId);
+ logger.debug("Finding Neutron port: {}", neutronPortId);
CloudSite cloudSite =
cloudConfig.getCloudSite(cloudSiteId).orElseThrow(() -> new MsoCloudSiteNotFound(cloudSiteId));
Quantum neutronClient = getNeutronClient(cloudSite, tenantId);
/**
* Delete the specified Network (by ID) in the given cloud. If the network does not exist, success is returned.
* <p>
- *
+ *
* @param networkId Openstack ID of the network to delete
* @param tenantId The Openstack tenant.
* @param cloudSiteId The cloud identifier (may be a region) from which to delete the network.
* name.
*
* @param adminClient an authenticated Keystone object
- *
+ *
* @param tenantName the tenant name or ID to query
- *
+ *
* @return a Tenant object or null if not found
*/
public Network findNetworkByNameOrId(Quantum neutronClient, String networkNameOrId) {
* Find a network (or query its existence) by its Id.
*
* @param neutronClient an authenticated Quantum object
- *
+ *
* @param networkId the network ID to query
- *
+ *
* @return a Network object or null if not found
*/
private Network findNetworkById(Quantum neutronClient, String networkId) {
return port;
} catch (OpenStackResponseException e) {
if (e.getStatus() == 404) {
- logger.warn("Neutron port not found: " + neutronPortId, "Neutron port not found: " + neutronPortId);
+ logger.warn("Neutron port not found: {}", "Neutron port not found: " + neutronPortId, neutronPortId);
return null;
} else {
logger.error("{} {} Openstack Error, GET Neutron Port By ID ({}): ",
* probably needs to return an error (instead of returning the first match).
*
* @param neutronClient an authenticated Quantum object
- *
+ *
* @param networkName the network name to query
- *
+ *
* @return a Network object or null if not found
*/
public Network findNetworkByName(Quantum neutronClient, String networkName) {
public Response getProcessingFlagsImpl(String flag) {
ProcessingFlags processingFlags = null;
- logger.debug("Flag is: " + flag);
+ logger.debug("Flag is: {}", flag);
int respStatus = HttpStatus.SC_OK;
try {
processingFlags = processingFlagsRepo.findByFlag(flag);
public Response updateProcessingFlagsValueImpl(String flag, ProcessingFlags updatedProcessingFlag) {
ProcessingFlags processingFlags = null;
- logger.debug("Flag is: " + flag);
+ logger.debug("Flag is: {}", flag);
int respStatus = HttpStatus.SC_OK;
try {
if (updatedProcessingFlag == null) {
}
String value = updatedProcessingFlag.getValue();
if (value == null || (!value.equalsIgnoreCase("YES") && !value.equalsIgnoreCase("NO"))) {
- logger.debug("Value " + value + " is invalid, only yes/no are allowed");
+ logger.debug("Value {} is invalid, only yes/no are allowed", value);
throw new RuntimeException("Invalid value specified");
}
processingFlags = processingFlagsRepo.findByFlag(flag);
collectionInstanceGroupList = instanceGroup.getCollectionInstanceGroupCustomizations();
CollectionNetworkResourceCustomization collectionNetworkCust =
instanceGroup.getCollectionNetworkResourceCustomizations().get(0);
- logger.debug("Found Collection Network Resource Customization: "
- + collectionNetworkCust.getModelCustomizationUUID());
+ logger.debug("Found Collection Network Resource Customization: {}",
+ collectionNetworkCust.getModelCustomizationUUID());
} else {
logger.debug(
"No Network Collection found. toscaNodeType does not contain NetworkCollection");
logger.debug(Integer.toString(port));
String buildingBlockFlowName = "CreateNetworkCollectionBB";
BuildingBlockDetail buildingBlockDetail = client.getBuildingBlockDetail(buildingBlockFlowName);
- logger.debug("" + buildingBlockDetail.getResourceType());
+ logger.debug("{}", buildingBlockDetail.getResourceType());
assertNotNull(buildingBlockDetail);
}
catalogDBClient.postCloudSite(cloudSite);
}
} catch (Exception e) {
- logger.error("Error creating cloud site in Catalog Adapter: " + e.getMessage(), e);
+ logger.error("Error creating cloud site in Catalog Adapter: {}", e.getMessage(), e);
throw new CloudException("Error creating cloud site in Catalog Adapter", e);
}
}
CloudSite cloudSite = cloudConfig.getCloudSite(cloudInformation.getRegionId())
.orElseThrow(() -> new MsoCloudSiteNotFound(cloudInformation.getRegionId()));
if (cloudSite.getOrchestrator() != null && MULTICLOUD_MODE.equalsIgnoreCase(cloudSite.getOrchestrator())) {
- logger.debug("Skipping Heatbridge as CloudSite orchestrator is: " + MULTICLOUD_MODE);
+ logger.debug("Skipping Heatbridge as CloudSite orchestrator is: {}", MULTICLOUD_MODE);
return;
}
CloudIdentity cloudIdentity = cloudSite.getIdentityService();
CloudSite cloudSite = cloudConfig.getCloudSite(cloudInformation.getRegionId())
.orElseThrow(() -> new MsoCloudSiteNotFound(cloudInformation.getRegionId()));
if (cloudSite.getOrchestrator() != null && MULTICLOUD_MODE.equalsIgnoreCase(cloudSite.getOrchestrator())) {
- logger.debug("Skipping Heatbridge as CloudSite orchestrator is: " + MULTICLOUD_MODE);
+ logger.debug("Skipping Heatbridge as CloudSite orchestrator is: {}", MULTICLOUD_MODE);
return;
}
CloudIdentity cloudIdentity = cloudSite.getIdentityService();
String str = this.convertNode((JsonNode) obj);
stringMap.put(key, str);
} catch (Exception e) {
- logger.debug("DANGER WILL ROBINSON: unable to convert value for JsonNode " + key, e);
+ logger.debug("DANGER WILL ROBINSON: unable to convert value for JsonNode {}", key, e);
// okay in this instance - only string values (fqdn) are expected to be needed
}
} else if (obj instanceof java.util.LinkedHashMap) {
String str = JSON_MAPPER.writeValueAsString(obj);
stringMap.put(key, str);
} catch (Exception e) {
- logger.debug("DANGER WILL ROBINSON: unable to convert value for LinkedHashMap " + key, e);
+ logger.debug("DANGER WILL ROBINSON: unable to convert value for LinkedHashMap {}", key, e);
}
} else if (obj instanceof Integer) {
try {
String str = "" + obj;
stringMap.put(key, str);
} catch (Exception e) {
- logger.debug("DANGER WILL ROBINSON: unable to convert value for Integer " + key, e);
+ logger.debug("DANGER WILL ROBINSON: unable to convert value for Integer {}", key, e);
}
} else {
try {
String str = obj.toString();
stringMap.put(key, str);
} catch (Exception e) {
- logger.debug(
- "DANGER WILL ROBINSON: unable to convert value " + key + " (" + e.getMessage() + ")",
- e);
+ logger.debug("DANGER WILL ROBINSON: unable to convert value {} ({})", key, e.getMessage(), e);
}
}
}
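Note on the conversions in this hunk: with SLF4J 1.6+, a Throwable passed as the last argument is only
attached to the log event (stack trace and all) when it is not consumed by a "{}" placeholder, so calls
such as the ones above keep the exception detail that the old concatenated form produced. A minimal
sketch of the pattern these changes rely on (logger and variable names follow the surrounding code):

    // one placeholder, two arguments: 'key' fills the placeholder and 'e' is left
    // over, so SLF4J attaches it as the event's throwable and prints its stack trace
    logger.debug("DANGER WILL ROBINSON: unable to convert value for JsonNode {}", key, e);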
boolean useMCUuid = false;
if (mcu != null && !mcu.isEmpty()) {
if ("null".equalsIgnoreCase(mcu)) {
- logger.debug("modelCustomizationUuid: passed in as the string 'null' - will ignore: "
- + modelCustomizationUuid);
+ logger.debug("modelCustomizationUuid: passed in as the string 'null' - will ignore: {}",
+ modelCustomizationUuid);
useMCUuid = false;
mcu = "";
} else {
isVolumeRequest = true;
}
- logger.debug("requestTypeString = " + requestTypeString + ", nestedStackId = " + nestedStackId
- + ", nestedBaseStackId = " + nestedBaseStackId);
+ logger.debug("requestTypeString = {}, nestedStackId = {}, nestedBaseStackId = {}", requestTypeString,
+ nestedStackId, nestedBaseStackId);
// handle a nestedStackId if sent- this one would be for the volume - so applies to both Vf and Vnf
StackInfo nestedHeatStack = null;
if (!doNotTest) {
if ((moreThanMin || equalToMin) // aic >= min
&& (equalToMax || !(moreThanMax))) { // aic <= max
- logger.debug("VNF Resource " + vnfResource.getModelName() + ", ModelUuid="
- + vnfResource.getModelUUID() + " " + VERSION_MIN + " =" + minVersionVnf + " "
- + VERSION_MAX + " :" + maxVersionVnf + " supported on Cloud: " + cloudSiteId
- + " with AIC_Version:" + cloudSiteOpt.get().getCloudVersion());
+ logger.debug(
+ "VNF Resource {}, ModelUuid={} {} ={} {} :{} supported on Cloud: {} with AIC_Version:{}",
+ vnfResource.getModelName(), vnfResource.getModelUUID(), VERSION_MIN,
+ minVersionVnf, VERSION_MAX, maxVersionVnf, cloudSiteId,
+ cloudSiteOpt.get().getCloudVersion());
} else {
// ERROR
String error = "VNF Resource type: " + vnfResource.getModelName() + ", ModelUuid="
if (oldWay) {
// This will handle old Gamma BrocadeVCE VNF
- logger.debug("No environment parameter found for this Type " + vfModuleType);
+ logger.debug("No environment parameter found for this Type {}", vfModuleType);
} else {
if (heatEnvironment == null) {
String error = "Update VNF: undefined Heat Environment. VF=" + vfModuleType
}
}
- logger.debug("In MsoVnfAdapterImpl, about to call db.getNestedTemplates avec templateId="
- + heatTemplate.getArtifactUuid());
+ logger.debug("In MsoVnfAdapterImpl, about to call db.getNestedTemplates avec templateId={}",
+ heatTemplate.getArtifactUuid());
List<HeatTemplate> nestedTemplates = heatTemplate.getChildTemplates();
logger.debug("AddGetFilesOnVolumeReq - setting to true! {}", propertyString);
}
} catch (Exception e) {
- logger.debug("An error occured trying to get property " + MsoVnfAdapterImpl.ADD_GET_FILES_ON_VOLUME_REQ
- + " - default to false", e);
+ logger.debug("An error occured trying to get property {} - default to false",
+ MsoVnfAdapterImpl.ADD_GET_FILES_ON_VOLUME_REQ, e);
}
if (!isVolumeRequest || addGetFilesOnVolumeReq) {
} else {
// now use VF_MODULE_TO_HEAT_FILES table
logger.debug(
- "In MsoVnfAdapterImpl createVfModule, about to call db.getHeatFilesForVfModule avec vfModuleId="
- + vf.getModelUUID());
+ "In MsoVnfAdapterImpl createVfModule, about to call db.getHeatFilesForVfModule avec vfModuleId={}",
+ vf.getModelUUID());
heatFiles = vf.getHeatFiles();
}
if (heatFiles != null && !heatFiles.isEmpty()) {
logger.debug("Contents of heatFiles - to be added to files: on stack");
for (HeatFiles heatfile : heatFiles) {
- logger.debug(heatfile.getFileName() + " -> " + heatfile.getFileBody());
+ logger.debug("{} -> {}", heatfile.getFileName(), heatfile.getFileBody());
heatFilesObjects.put(heatfile.getFileName(), heatfile.getFileBody());
}
} else {
String propertyString = this.environment.getProperty(MsoVnfAdapterImpl.CHECK_REQD_PARAMS);
if ("false".equalsIgnoreCase(propertyString) || "n".equalsIgnoreCase(propertyString)) {
checkRequiredParameters = false;
- logger.debug("CheckRequiredParameters is FALSE. Will still check but then skip blocking..."
- + MsoVnfAdapterImpl.CHECK_REQD_PARAMS);
+ logger.debug("CheckRequiredParameters is FALSE. Will still check but then skip blocking...{}",
+ MsoVnfAdapterImpl.CHECK_REQD_PARAMS);
}
} catch (Exception e) {
// No problem - default is true
msoHeatUtils.copyBaseOutputsToInputs(goldenInputs, nestedVolumeOutputs, parameterNames, aliasToParam);
for (HeatTemplateParam parm : heatTemplate.getParameters()) {
- logger.debug("Parameter:'" + parm.getParamName() + "', isRequired=" + parm.isRequired() + ", alias="
- + parm.getParamAlias());
+ logger.debug("Parameter:'{}', isRequired={}, alias={}", parm.getParamName(), parm.isRequired(),
+ parm.getParamAlias());
if (parm.isRequired() && (goldenInputs == null || !goldenInputs.containsKey(parm.getParamName()))) {
// The check for an alias was moved to the method in MsoHeatUtils - when we converted the
// Map<String, String> to Map<String, Object>
- logger.debug("**Parameter " + parm.getParamName() + " is required and not in the inputs...check "
- + "environment");
+ logger.debug("**Parameter {} is required and not in the inputs...check " + "environment",
+ parm.getParamName());
if (mhee != null && mhee.containsParameter(parm.getParamName())) {
logger.debug("Required parameter {} appears to be in environment - do not count as missing",
parm.getParamName());
StackInfo currentStack =
msoHeatUtils.deleteStack(tenantId, cloudOwner, cloudSiteId, stackId, false, timeoutMinutes);
if (currentStack != null && outputs != null && outputs.value != null) {
- logger.debug("Adding canonical stack id to outputs " + currentStack.getCanonicalName());
+ logger.debug("Adding canonical stack id to outputs {}", currentStack.getCanonicalName());
outputs.value.put("canonicalStackId", currentStack.getCanonicalName());
}
msoHeatUtils.updateResourceStatus(msoRequest.getRequestId(),
vfModuleName = this.getVfModuleNameFromModuleStackId(vfModuleStackId);
}
- logger.debug("Updating VFModule: " + vfModuleName + " of type " + vfModuleType + "in " + cloudOwner + "/"
- + cloudSiteId + "/" + tenantId);
- logger.debug("requestTypeString = " + requestTypeString + ", nestedVolumeStackId = " + nestedStackId
- + ", nestedBaseStackId = " + nestedBaseStackId);
+ logger.debug("Updating VFModule: {} of type {}in {}/{}/{}", vfModuleName, vfModuleType, cloudOwner, cloudSiteId,
+ tenantId);
+ logger.debug("requestTypeString = {}, nestedVolumeStackId = {}, nestedBaseStackId = {}", requestTypeString,
+ nestedStackId, nestedBaseStackId);
// Build a default rollback object (no actions performed)
VnfRollback vfRollback = new VnfRollback();
if (!doNotTest) {
if ((moreThanMin || equalToMin) // aic >= min
&& ((equalToMax) || !(moreThanMax))) { // aic <= max
- logger.debug("VNF Resource " + vnfResource.getModelName() + " " + VERSION_MIN + " ="
- + minVersionVnf + " " + VERSION_MAX + " :" + maxVersionVnf + " supported on Cloud: "
- + cloudSiteId + " with AIC_Version:" + aicV);
+ logger.debug("VNF Resource {} {} ={} {} :{} supported on Cloud: {} with AIC_Version:{}",
+ vnfResource.getModelName(), VERSION_MIN, minVersionVnf, VERSION_MAX, maxVersionVnf,
+ cloudSiteId, aicV);
} else {
// ERROR
String error = "VNF Resource type: " + vnfResource.getModelName() + " " + VERSION_MIN + " ="
for (HeatTemplate entry : nestedTemplates) {
nestedTemplatesChecked.put(entry.getTemplateName(), entry.getTemplateBody());
- logger.debug(entry.getTemplateName() + " -> " + entry.getTemplateBody());
+ logger.debug("{} -> {}", entry.getTemplateName(), entry.getTemplateBody());
}
} else {
logger.debug("No nested templates found - nothing to do here");
// this will match the nested templates format
logger.debug("Contents of heatFiles - to be added to files: on stack:");
for (HeatFiles heatfile : heatFiles) {
- logger.debug(heatfile.getFileName() + " -> " + heatfile.getFileBody());
+ logger.debug("{} -> {}", heatfile.getFileName(), heatfile.getFileBody());
heatFilesObjects.put(heatfile.getFileName(), heatfile.getFileBody());
}
} else {
boolean hasJson = false;
for (HeatTemplateParam parm : heatTemplate.getParameters()) {
- logger.debug("Parameter:'" + parm.getParamName() + "', isRequired=" + parm.isRequired() + ", alias="
- + parm.getParamAlias());
+ logger.debug("Parameter:'{}', isRequired={}, alias={}", parm.getParamName(), parm.isRequired(),
+ parm.getParamAlias());
// handle json
String parameterType = parm.getParamType();
if (parameterType == null || "".equals(parameterType.trim())) {
// TODO - what to do here?
// for now - send the error to debug, but just leave it as a String
String errorMessage = jpe.getMessage();
- logger.debug("Json Error Converting " + parm.getParamName() + " - " + errorMessage, jpe);
+ logger.debug("Json Error Converting {} - {}", parm.getParamName(), errorMessage, jpe);
hasJson = false;
jsonNode = null;
} catch (Exception e) {
// or here?
- logger.debug("Json Error Converting " + parm.getParamName() + " " + e.getMessage(), e);
+ logger.debug("Json Error Converting {} {}", parm.getParamName(), e.getMessage(), e);
hasJson = false;
jsonNode = null;
}
String realParamName = parm.getParamName();
String alias = parm.getParamAlias();
Object value = inputs.get(alias);
- logger.debug("*Found an Alias: paramName=" + realParamName + ",alias=" + alias + ",value=" + value);
+ logger.debug("*Found an Alias: paramName={},alias={},value={}", realParamName, alias, value);
inputs.remove(alias);
inputs.put(realParamName, value);
logger.debug("{} entry removed from inputs, added back using {}", alias, realParamName);
public void queryVnf(String cloudSiteId, String cloudOwner, String tenantId, String vnfNameOrId,
MsoRequest msoRequest, Holder<Boolean> vnfExists, Holder<String> vnfId, Holder<VnfStatus> status,
Holder<Map<String, String>> outputs) throws VnfException {
- logger.debug("Querying VNF " + vnfNameOrId + " in " + cloudOwner + "/" + cloudSiteId + "/" + tenantId);
+ logger.debug("Querying VNF {} in {}/{}/{}", vnfNameOrId, cloudOwner, cloudSiteId, tenantId);
// Will capture execution time for metrics
long startTime = System.currentTimeMillis();
String vfModuleId = rollback.getVfModuleStackId();
- logger.debug("Rolling Back VF Module " + vfModuleId + " in " + cloudOwner + "/" + cloudSiteId + "/" + tenantId);
+ logger.debug("Rolling Back VF Module {} in {}/{}/{}", vfModuleId, cloudOwner, cloudSiteId, tenantId);
VduInstance vduInstance = null;
try {
return Integer.valueOf(inputValue.toString());
} catch (Exception e) {
- logger.debug("Unable to convert " + inputValue + " to an integer!", e);
+ logger.debug("Unable to convert {} to an integer!", inputValue, e);
return null;
}
} else if (type.equalsIgnoreCase("json")) {
JsonNode jsonNode = JSON_MAPPER.readTree(JSON_MAPPER.writeValueAsString(inputValue));
return jsonNode;
} catch (Exception e) {
- logger.debug("Unable to convert " + inputValue + " to a JsonNode!", e);
+ logger.debug("Unable to convert {} to a JsonNode!", inputValue, e);
return null;
}
} else if (type.equalsIgnoreCase("boolean")) {
String str = stackOutputs.get(key).toString();
stringOutputs.put(key, str);
} catch (Exception e) {
- logger.debug("Unable to add " + key + " to outputs - unable to call .toString() " + e.getMessage(),
- e);
+ logger.debug("Unable to add {} to outputs - unable to call .toString() {}", key, e.getMessage(), e);
}
}
}
final String json = JSON_MAPPER.writeValueAsString(obj);
return json;
} catch (JsonParseException jpe) {
- logger.debug("Error converting json to string " + jpe.getMessage());
+ logger.debug("Error converting json to string {}", jpe.getMessage());
} catch (Exception e) {
- logger.debug("Error converting json to string " + e.getMessage());
+ logger.debug("Error converting json to string {}", e.getMessage());
}
return "[Error converting json to string]";
}
String str = this.convertNode((JsonNode) obj);
stringMap.put(key, str);
} catch (Exception e) {
- logger.debug("DANGER WILL ROBINSON: unable to convert value for JsonNode " + key, e);
+ logger.debug("DANGER WILL ROBINSON: unable to convert value for JsonNode {}", key, e);
// okay in this instance - only string values (fqdn) are expected to be needed
}
} else if (obj instanceof java.util.LinkedHashMap) {
String str = JSON_MAPPER.writeValueAsString(obj);
stringMap.put(key, str);
} catch (Exception e) {
- logger.debug("DANGER WILL ROBINSON: unable to convert value for LinkedHashMap " + key, e);
+ logger.debug("DANGER WILL ROBINSON: unable to convert value for LinkedHashMap {}", key, e);
}
} else if (obj instanceof Integer) {
try {
String str = "" + obj;
stringMap.put(key, str);
} catch (Exception e) {
- logger.debug("DANGER WILL ROBINSON: unable to convert value for Integer " + key, e);
+ logger.debug("DANGER WILL ROBINSON: unable to convert value for Integer {}", key, e);
}
} else {
try {
String str = obj.toString();
stringMap.put(key, str);
} catch (Exception e) {
- logger.debug(
- "DANGER WILL ROBINSON: unable to convert value " + key + " (" + e.getMessage() + ")",
- e);
+ logger.debug("DANGER WILL ROBINSON: unable to convert value {} ({})", key, e.getMessage(), e);
}
}
}
isVolumeRequest = true;
}
- logger.debug("requestType = " + requestType + ", volumeGroupStackId = " + volumeGroupId + ", baseStackId = "
- + baseVfModuleId);
+ logger.debug("requestType = {}, volumeGroupStackId = {}, baseStackId = {}", requestType, volumeGroupId,
+ baseVfModuleId);
// Get the VNF/VF Module definition from the Catalog DB first.
// There are three relevant records: VfModule, VfModuleCustomization, VnfResource
vnfResource = vfModuleCust.getVfModule().getVnfResources();
} catch (Exception e) {
- logger.debug("unhandled exception in create VF - [Query]" + e.getMessage());
+ logger.debug("unhandled exception in create VF - [Query]{}", e.getMessage());
throw new VnfException("Exception during create VF " + e.getMessage());
}
// More precise handling/messaging if the Module already exists
if (vduInstance != null && !(vduInstance.getStatus().getState() == VduStateType.NOTFOUND)) {
VduStateType status = vduInstance.getStatus().getState();
- logger.debug("Found Existing VDU, status=" + status);
+ logger.debug("Found Existing VDU, status={}", status);
if (status == VduStateType.INSTANTIATED) {
if (failIfExists != null && failIfExists) {
logger.debug(error);
throw new VnfException(error, MsoExceptionCategory.INTERNAL);
} else {
- logger.debug("Got HEAT Template from DB: " + heatTemplate.getHeatTemplate());
+ logger.debug("Got HEAT Template from DB: {}", heatTemplate.getHeatTemplate());
}
if (heatEnvironment == null) {
"OpenStack", ErrorCode.DataError.getValue(), error);
throw new VnfException(error, MsoExceptionCategory.INTERNAL);
} else {
- logger.debug("Got Heat Environment from DB: " + heatEnvironment.getEnvironment());
+ logger.debug("Got Heat Environment from DB: {}", heatEnvironment.getEnvironment());
}
HashMap<String, HeatTemplateParam> params = new HashMap<String, HeatTemplateParam>();
Set<HeatTemplateParam> paramSet = heatTemplate.getParameters();
- logger.debug("paramSet has " + paramSet.size() + " entries");
+ logger.debug("paramSet has {} entries", paramSet.size());
for (HeatTemplateParam htp : paramSet) {
params.put(htp.getParamName(), htp);
if (value != null) {
goldenInputs.put(key, value);
} else {
- logger.debug("Failed to convert input " + key + "='" + inputs.get(key) + "' to "
- + params.get(key).getParamType());
+ logger.debug("Failed to convert input {}='{}' to {}", key, inputs.get(key),
+ params.get(key).getParamType());
}
} else {
extraInputs.add(key);
}
}
}
- logger.debug("Ignoring extra inputs: " + extraInputs);
+ logger.debug("Ignoring extra inputs: {}", extraInputs);
}
// Next add in Volume Group Outputs if there are any. Copy directly without conversions.
String propertyString = this.environment.getProperty(MsoVnfPluginAdapterImpl.CHECK_REQD_PARAMS);
if ("false".equalsIgnoreCase(propertyString) || "n".equalsIgnoreCase(propertyString)) {
checkRequiredParameters = false;
- logger.debug("CheckRequiredParameters is FALSE. Will still check but then skip blocking..."
- + MsoVnfPluginAdapterImpl.CHECK_REQD_PARAMS);
+ logger.debug("CheckRequiredParameters is FALSE. Will still check but then skip blocking...{}",
+ MsoVnfPluginAdapterImpl.CHECK_REQD_PARAMS);
}
} catch (Exception e) {
// No problem - default is true
- logger.debug("An exception occured trying to get property " + MsoVnfPluginAdapterImpl.CHECK_REQD_PARAMS,
- e);
+ logger.debug("An exception occured trying to get property {}",
+ MsoVnfPluginAdapterImpl.CHECK_REQD_PARAMS, e);
}
// Do the actual parameter checking.
for (HeatTemplateParam parm : heatTemplate.getParameters()) {
if (parm.isRequired() && (!goldenInputs.containsKey(parm.getParamName()))) {
if (mhee != null && mhee.containsParameter(parm.getParamName())) {
- logger.debug("Required parameter " + parm.getParamName()
- + " appears to be in environment - do not count as missing");
+ logger.debug("Required parameter {} appears to be in environment - do not count as missing",
+ parm.getParamName());
} else {
- logger.debug("adding to missing parameters list: " + parm.getParamName());
+ logger.debug("adding to missing parameters list: {}", parm.getParamName());
if (missingParams == null) {
missingParams = parm.getParamName();
} else {
logger.debug(error);
throw new VnfException(error, MsoExceptionCategory.USERDATA);
} else {
- logger.debug("found missing parameters [" + missingParams
- + "] - but checkRequiredParameters is false - " + "will not block");
+ logger.debug("found missing parameters [{}] - but checkRequiredParameters is false - "
+ + "will not block", missingParams);
}
} else {
logger.debug("No missing parameters found - ok to proceed");
vnfId.value = vduInstance.getVduInstanceId();
- logger.debug("VF Module " + vfModuleName + " successfully created");
+ logger.debug("VF Module {} successfully created", vfModuleName);
return;
}
public void deleteVfModule(String cloudSiteId, String cloudOwner, String tenantId, String vfModuleId,
MsoRequest msoRequest, Holder<Map<String, String>> outputs) throws VnfException {
- logger.debug("Deleting VF Module " + vfModuleId + " in " + cloudOwner + "/" + cloudSiteId + "/" + tenantId);
+ logger.debug("Deleting VF Module {} in {}/{}/{}", vfModuleId, cloudOwner, cloudSiteId, tenantId);
// Will capture execution time for metrics
long startTime = System.currentTimeMillis();
// cloudsite, use that. Otherwise, the default is the (original) HEAT-based
// impl.
- logger.debug("Entered GetVnfAdapterImpl: mode=" + mode + ", cloudSite=" + cloudSiteId);
+ logger.debug("Entered GetVnfAdapterImpl: mode={}, cloudSite={}", mode, cloudSiteId);
if (mode == null) {
// Didn't get an explicit mode type requested.
// has a CloudifyManager assigned to it
Optional<CloudSite> cloudSite = cloudConfig.getCloudSite(cloudSiteId);
if (cloudSite.isPresent()) {
- logger.debug("Got CloudSite: " + cloudSite.toString());
+ logger.debug("Got CloudSite: {}", cloudSite.toString());
if (MULTICLOUD_MODE.equalsIgnoreCase(cloudSite.get().getOrchestrator())) {
- logger.debug("GetVnfAdapterImpl: mode=" + MULTICLOUD_MODE);
+ logger.debug("GetVnfAdapterImpl: mode={}", MULTICLOUD_MODE);
return true;
}
}
Preconditions.checkState(!Strings.isNullOrEmpty(heatStackId), "Invalid heatStackId!");
List<Resource> stackBasedResources =
osClient.getStackBasedResources(heatStackId, HeatBridgeConstants.OS_DEFAULT_HEAT_NESTING);
- logger.debug(stackBasedResources.size() + " heat stack resources are extracted for stack: " + heatStackId);
+ logger.debug("{} heat stack resources are extracted for stack: {}", stackBasedResources.size(), heatStackId);
return stackBasedResources;
}
.cloudRegion(cloudOwner, cloudRegionId).image(aaiImage.getImageId()));
if (!resourcesClient.exists(uri)) {
transaction.create(uri, aaiImage);
- logger.debug("Queuing AAI command to add image: " + aaiImage.getImageId());
+ logger.debug("Queuing AAI command to add image: {}", aaiImage.getImageId());
} else {
- logger.debug("Nothing to add since image: " + aaiImage.getImageId() + "already exists in AAI.");
+ logger.debug("Nothing to add since image: {}already exists in AAI.", aaiImage.getImageId());
}
} catch (WebApplicationException e) {
throw new HeatBridgeException(
}
protected String getInterfaceType(NodeType nodeType, String nicType) {
- logger.debug("nicType: " + nicType + "nodeType: " + nodeType);
+ logger.debug("nicType: {}nodeType: {}", nicType, nodeType);
if (DIRECT.equalsIgnoreCase(nicType)) {
return SRIOV;
} else {
for (Server server : osServers) {
Pserver pserver = aaiHelper.buildPserver(server);
if (pserver != null) {
- logger.debug("Adding Pserver: " + server.getHost());
+ logger.debug("Adding Pserver: {}", server.getHost());
pserverMap.put(server.getHost(), pserver);
}
}
transaction.execute(dryrun);
} catch (BulkProcessFailed e) {
String msg = "Failed to commit transaction";
- logger.debug(msg + " with error: " + e);
+ logger.debug("{} with error: {}", msg, e);
throw new HeatBridgeException(msg, e);
}
}
} catch (NotFoundException e) {
String msg = "Failed to commit delete heatbridge data transaction";
- logger.debug(msg + " with error: " + e);
+ logger.debug("{} with error: {}", msg, e);
throw new HeatBridgeException(msg, e);
} catch (Exception e) {
String msg = "Failed to commit delete heatbridge data transaction";
- logger.debug(msg + " with error: " + e);
+ logger.debug("{} with error: {}", msg, e);
throw new HeatBridgeException(msg, e);
}
}
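The converse applies to the heatbridge hunks above: when every "{}" has a matching argument, a trailing
exception fills the last placeholder and is rendered via toString() only, which matches the behaviour of
the old "msg + ... + e" concatenation. A hedged sketch of the two variants (names mirror the code above):

    // exception consumed by the second placeholder: message only, no stack trace
    logger.debug("{} with error: {}", msg, e);
    // exception left over after the placeholders: stack trace is logged as well
    logger.debug("{} with error: ", msg, e);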
pIf -> pIf.getSriovPfs() != null && CollectionUtils.isNotEmpty(pIf.getSriovPfs().getSriovPf()))
.forEach(pIf -> pIf.getSriovPfs().getSriovPf().forEach(sriovPf -> {
if (pciIds.contains(sriovPf.getPfPciId())) {
- logger.debug("creating transaction to delete SR-IOV PF: " + pIf.getInterfaceName()
- + " from PServer: " + pserverName);
+ logger.debug("creating transaction to delete SR-IOV PF: {} from PServer: {}",
+ pIf.getInterfaceName(), pserverName);
if (env.getProperty("heatBridgeDryrun", Boolean.class, false)) {
logger.debug("Would delete Sriov Pf: {}",
AAIUriFactory.createResourceUri(AAIFluentTypeBuilder.cloudInfrastructure()
calendar.add(Calendar.DATE, -archivedPeriod);
Date archivingDate = calendar.getTime();
- logger.debug("Date before 6 months: " + (calendar.get(Calendar.MONTH) + 1) + "-" + calendar.get(Calendar.DATE)
- + "-" + calendar.get(Calendar.YEAR));
+ logger.debug("Date before 6 months: {}-{}-{}", (calendar.get(Calendar.MONTH) + 1), calendar.get(Calendar.DATE),
+ calendar.get(Calendar.YEAR));
List<InfraActiveRequests> requestsByEndTime = new ArrayList<>();
+ reqTunable.getAction();
}
}
- logger.debug(GENERATED_KEY + key);
+ logger.debug("{}{}", GENERATED_KEY, key);
String value;
value = env.getProperty(key, "");
if (Constants.MSO_ACTION_LCM.equals(msoAction) && (value == null || value.length() == 0)) {
key = Constants.REQUEST_TUNABLES + "." + msoAction + ".default";
- logger.debug("Can not find key of " + reqTunable.getOperation() + ", use default: " + key);
+ logger.debug("Can not find key of {}, use default: {}", reqTunable.getOperation(), key);
value = env.getProperty(key, "");
}
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/**
* Sends a message to the BPMN workflow message service. The URL path is constructed using the specified message
* type and correlator.
- *
+ *
* @param workflowMessageUrl the base BPMN WorkflowMessage URL
* @param messageType the message type
* @param correlator the message correlator
* @return true if the message was consumed successfully by the endpoint
*/
public boolean send(String workflowMessageUrl, String messageType, String correlator, String message) {
- logger.debug(getClass().getSimpleName() + ".send(" + "workflowMessageUrl=" + workflowMessageUrl
- + " messageType=" + messageType + " correlator=" + correlator + " message=" + message + ")");
+ logger.debug("{}.send(workflowMessageUrl={} messageType={} correlator={} message={})",
+ getClass().getSimpleName(), workflowMessageUrl, messageType, correlator, message);
while (workflowMessageUrl.endsWith("/")) {
workflowMessageUrl = workflowMessageUrl.substring(0, workflowMessageUrl.length() - 1);
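For context, send() targets an endpoint built from the base URL plus the message type and correlator
(see the /WorkflowMessage examples further below); a hypothetical invocation, with the host, port, and
payload variable purely illustrative:

    // expected to reach roughly <workflowMessageUrl>/SDNCAResponse/<correlator>
    boolean ok = send("http://so-bpmn-infra:8081/mso/WorkflowMessage", "SDNCAResponse",
            "6d10d075-100c-42d0-9d84-a52432681cae-1478486185286", jsonMessage);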
/**
* Sends a message to the BPMN workflow message service. The specified URL must have the message type and correlator
* already embedded in it.
- *
+ *
* @param url the endpoint URL
* @param message the JSON content
* @return true if the message was consumed successfully by the endpoint
*/
public boolean send(String url, String message) {
- logger.debug(getClass().getSimpleName() + ".send(" + "url=" + url + " message=" + message + ")");
+ logger.debug("{}.send(" + "url={} message={})", getClass().getSimpleName(), url, message);
logger.info(LoggingAnchor.THREE, MessageEnum.RA_CALLBACK_BPEL.toString(),
message == null ? "[no content]" : message, CAMUNDA);
Status status = appCSupport.getStatusFromGenericResponse(response);
- logger.info("Status code is: " + status.getCode());
- logger.info("Status message is: " + status.getMessage());
+ logger.info("Status code is: {}", status.getCode());
+ logger.info("Status message is: {}", status.getMessage());
if (appCSupport.getFinalityOf(status)) {
logger.debug("Obtained final status, complete the task");
public void onException(AppcClientException exception) {
logger.info("ON EXCEPTION IN CALLBACK");
- logger.info("Exception from APPC: " + exception.getMessage());
+ logger.info("Exception from APPC: {}", exception.getMessage());
Status exceptionStatus = appCSupport.buildStatusFromAppcException(exception);
completeExternalTask(externalTask, externalTaskService, exceptionStatus);
}
requestObject.getClass().getDeclaredMethod("setActionIdentifiers", ActionIdentifiers.class)
.invoke(requestObject, identifier);
if (payload != null) {
- logger.info("payload in RunCommand: " + payload.getValue());
+ logger.info("payload in RunCommand: {}", payload.getValue());
requestObject.getClass().getDeclaredMethod("setPayload", Payload.class).invoke(requestObject, payload);
}
} catch (IllegalAccessException | NoSuchMethodException | InvocationTargetException e) {
return downloadResult;
}
} catch (RuntimeException e) {
- logger.debug("Not able to download the artifact due to an exception: " + artifact.getArtifactURL());
+ logger.debug("Not able to download the artifact due to an exception: {}", artifact.getArtifactURL());
this.sendASDCNotification(NotificationType.DOWNLOAD, artifact.getArtifactURL(), asdcConfig.getConsumerID(),
distributionId, DistributionStatusEnum.DOWNLOAD_ERROR, e.getMessage(), System.currentTimeMillis());
protected void sendFinalDistributionStatus(String distributionID, DistributionStatusEnum status,
String errorReason) {
- logger.debug("Enter sendFinalDistributionStatus with DistributionID " + distributionID + " and Status of "
- + status.name() + " and ErrorReason " + errorReason);
+ logger.debug("Enter sendFinalDistributionStatus with DistributionID {} and Status of {} and ErrorReason {}",
+ distributionID, status.name(), errorReason);
long subStarttime = System.currentTimeMillis();
try {
try {
if (!this.checkResourceAlreadyDeployed(resourceStructure, serviceDeployed)) {
- logger.debug("Processing Resource Type: " + resourceType + " and Model UUID: "
- + resourceStructure.getResourceInstance().getResourceUUID());
+ logger.debug("Processing Resource Type: {} and Model UUID: {}", resourceType,
+ resourceStructure.getResourceInstance().getResourceUUID());
if ("VF".equals(resourceType)) {
}
if (ASDCConfiguration.VF_MODULES_METADATA.equals(artifact.getArtifactType())) {
- logger.debug("VF_MODULE_ARTIFACT: "
- + new String(resultArtifact.getArtifactPayload(), StandardCharsets.UTF_8));
+ logger.debug("VF_MODULE_ARTIFACT: {}",
+ new String(resultArtifact.getArtifactPayload(), StandardCharsets.UTF_8));
logger.debug(ASDCNotificationLogging
.dumpVfModuleMetaDataList(((VfResourceStructure) resourceStructure)
.decodeVfModuleArtifact(resultArtifact.getArtifactPayload())));
resultArtifact);
} else {
writeArtifactToFile(artifact, resultArtifact);
- logger.debug(
- "Adding workflow artifact to structure: " + artifact.getArtifactName());
+ logger.debug("Adding workflow artifact to structure: {}",
+ artifact.getArtifactName());
resourceStructure.addWorkflowArtifactToStructure(artifact, resultArtifact);
}
if (!hasVFResource) {
- logger.debug("No resources found for Service: " + iNotif.getServiceUUID());
+ logger.debug("No resources found for Service: {}", iNotif.getServiceUUID());
logger.debug("Preparing to deploy Service: {}", iNotif.getServiceUUID());
try {
.filter(e -> e.getArtifactType().equalsIgnoreCase("WORKFLOW")).findFirst();
if (artifactOpt.isPresent()) {
IArtifactInfo artifactInfo = artifactOpt.get();
- logger.debug("Ready to parse this serviceArtifactUUID: " + artifactInfo.getArtifactUUID());
+ logger.debug("Ready to parse this serviceArtifactUUID: {}", artifactInfo.getArtifactUUID());
String filePath = Paths.get(getMsoConfigPath(), "ASDC", artifactInfo.getArtifactVersion(),
artifactInfo.getArtifactName()).normalize().toString();
ZipParser zipParserInstance = ZipParser.getInstance();
artifactContent = zipParserInstance.parseJsonForZip(filePath);
- logger.debug(
- "serviceArtifact parsing success! serviceArtifactUUID: " + artifactInfo.getArtifactUUID());
+ logger.debug("serviceArtifact parsing success! serviceArtifactUUID: {}",
+ artifactInfo.getArtifactUUID());
ResourceStructure resourceStructure = new VfResourceStructure(iNotif, new ResourceInstance());
resourceStructure.setResourceType(ResourceType.OTHER);
}
} catch (IOException e) {
- logger.error("serviceArtifact parse failure for service uuid: "
- + serviceMetadata.getValue(SdcPropertyNames.PROPERTY_NAME_CATEGORY));
+ logger.error("serviceArtifact parse failure for service uuid: {}",
+ serviceMetadata.getValue(SdcPropertyNames.PROPERTY_NAME_CATEGORY));
} catch (Exception e) {
logger.error("error NSST process resource failure ", e);
}
logger.info(LoggingAnchor.FOUR, MessageEnum.ASDC_ARTIFACT_DEPLOY_SUC.toString(),
statusData.getDistributionID(), "ASDC", "ASDC Updates Are Complete");
} catch (final Exception e) {
- logger.info("Error caught " + e.getMessage());
+ logger.info("Error caught {}", e.getMessage());
logger.error(LoggingAnchor.SIX, MessageEnum.ASDC_GENERAL_EXCEPTION.toString(),
"Exception caught during ASDCRestInterface", "ASDC", "invokeASDCService",
ErrorCode.BusinessProcessError.getValue(), "Exception in invokeASDCService", e);
VfModuleArtifact vfModuleArtifact = new VfModuleArtifact(artifactInfo, clientResult, modifiedHeatTemplate);
addArtifactByType(artifactInfo, clientResult, vfModuleArtifact);
if (ASDCConfiguration.VF_MODULES_METADATA.equals(artifactInfo.getArtifactType())) {
- logger.debug("VF_MODULE_ARTIFACT: " + new String(clientResult.getArtifactPayload(), "UTF-8"));
+ logger.debug("VF_MODULE_ARTIFACT: {}", new String(clientResult.getArtifactPayload(), "UTF-8"));
logger.debug(ASDCNotificationLogging.dumpVfModuleMetaDataList(vfModulesMetadataList));
}
}
for (String uuid : artifactsMapByUUID.keySet()) {
WorkflowArtifact artifactToInstall = artifactsMapByUUID.get(uuid);
if (isLatestVersionAvailable(artifactsMapByUUID, artifactToInstall)) {
- logger.debug("Installing the BPMN: " + artifactToInstall.getArtifactInfo().getArtifactName());
+ logger.debug("Installing the BPMN: {}", artifactToInstall.getArtifactInfo().getArtifactName());
deployWorkflowResourceToCamunda(artifactToInstall);
installWorkflowResource(artifactToInstall, vfResourceModelUuid);
} else {
- logger.debug("Skipping installing - not the latest version: "
- + artifactToInstall.getArtifactInfo().getArtifactName());
+ logger.debug("Skipping installing - not the latest version: {}",
+ artifactToInstall.getArtifactInfo().getArtifactName());
}
}
}
protected void deployWorkflowResourceToCamunda(WorkflowArtifact artifact) throws Exception {
String bpmnName = artifact.getArtifactInfo().getArtifactName();
String version = artifact.getArtifactInfo().getArtifactVersion();
- logger.debug("BPMN Name: " + bpmnName);
+ logger.debug("BPMN Name: {}", bpmnName);
try {
HttpResponse response = bpmnInstaller.sendDeploymentRequest(bpmnName, version);
logger.debug("Response status line: {}", response.getStatusLine());
logger.debug("resource request for resource customization id {}: {}", resourceCustomizationUuid, jsonStr);
return jsonStr;
} catch (JsonProcessingException e) {
- logger.error("resource input could not be deserialized for resource customization id ("
- + resourceCustomizationUuid + ")");
+ logger.error("resource input could not be deserialized for resource customization id ({})",
+ resourceCustomizationUuid);
throw new ArtifactInstallerException("resource input could not be parsed", e);
}
}
IVfModuleData vfMetadata = vfModuleStructure.getVfModuleMetadata();
- logger.debug("Comparing Vf_Modules_Metadata CustomizationUUID : "
- + vfMetadata.getVfModuleModelCustomizationUUID());
+ logger.debug("Comparing Vf_Modules_Metadata CustomizationUUID : {}",
+ vfMetadata.getVfModuleModelCustomizationUUID());
Optional<IEntityDetails> matchingObject = vfModuleEntityList.stream()
- .peek(group -> logger.debug("To Csar Group VFModuleModelCustomizationUUID "
- + group.getMetadata().getValue("vfModuleModelCustomizationUUID")))
+ .peek(group -> logger.debug("To Csar Group VFModuleModelCustomizationUUID {}",
+ group.getMetadata().getValue("vfModuleModelCustomizationUUID")))
.filter(group -> group.getMetadata().getValue("vfModuleModelCustomizationUUID")
.equals(vfMetadata.getVfModuleModelCustomizationUUID()))
.findFirst();
// add this vnfResource with existing vnfResource for this service
addVnfCustomization(service, vnfResource);
} else {
- logger.debug("Notification VF ResourceCustomizationUUID: "
- + vfNotificationResource.getResourceCustomizationUUID() + " doesn't match "
- + "Tosca VF Customization UUID: " + vfCustomizationUUID);
+ logger.debug(
+ "Notification VF ResourceCustomizationUUID: {} doesn't match Tosca VF Customization UUID: {}",
+ vfNotificationResource.getResourceCustomizationUUID(), vfCustomizationUUID);
}
}
service.setOnapGeneratedNaming(generateNamingValue);
List<Input> serviceInputs = toscaResourceStructure.getSdcCsarHelper().getServiceInputs();
- logger.debug("serviceInputs: {} " + serviceInputs);
+ logger.debug("serviceInputs: {} {}", serviceInputs);
if (!serviceInputs.isEmpty()) {
serviceInputs.forEach(input -> {
if (CDS_MODEL_NAME.equalsIgnoreCase(input.getName())) {
serviceInput = mapper.writeValueAsString(serviceInputList);
serviceInput = serviceInput.replace("\"", "\\\"");
} catch (JsonProcessingException e) {
- logger.error("service input could not be deserialized for service uuid: "
- + sdcCsarHelper.getServiceMetadata().getValue(SdcPropertyNames.PROPERTY_NAME_UUID));
+ logger.error("service input could not be deserialized for service uuid: {}",
+ sdcCsarHelper.getServiceMetadata().getValue(SdcPropertyNames.PROPERTY_NAME_UUID));
}
} else {
logger.debug("serviceInput is null");
propertiesJson = mapper.writeValueAsString(serviceProperties);
propertiesJson = propertiesJson.replace("\"", "\\\"");
} catch (JsonProcessingException e) {
- logger.error("serviceProperties could not be deserialized for service uuid: " + serviceUUID);
+ logger.error("serviceProperties could not be deserialized for service uuid: {}", serviceUUID);
} catch (Exception ex) {
- logger.error("service properties parsing failed. service uuid:" + serviceUUID);
+ logger.error("service properties parsing failed. service uuid:{}", serviceUUID);
}
} else {
}
} catch (Exception e) {
logger.debug("Error looking up or creating cloudsite : {}", cloudSite.getId());
- logger.debug("CloudSite Lookup/Creation Error: " + e);
+ logger.debug("CloudSite Lookup/Creation Error: {}", e);
}
configuration.setModelInfoConfiguration(this.mapperLayer.mapCatalogConfigurationToConfiguration(
configurationResourceCustomization, vnfVfmoduleCvnfcConfigurationCustomization));
} else {
- logger.debug("for Fabric configuration mapping by VF MODULE CUST UUID: "
- + configurationResourceKeys.getVfModuleCustomizationUUID());
+ logger.debug("for Fabric configuration mapping by VF MODULE CUST UUID: {}",
+ configurationResourceKeys.getVfModuleCustomizationUUID());
vnfVfmoduleCvnfcConfigurationCustomization = findVnfVfmoduleCvnfcConfigurationCustomization(
service.getModelUUID(), configurationResourceKeys.getVnfResourceCustomizationUUID(),
configurationResourceKeys.getVfModuleCustomizationUUID(),
bbInputSetupUtils.getAAIVolumeGroup(cloudOwner, cloudRegionId, volumeGroup.getVolumeGroupId())
.getModelCustomizationId();
if (modelInfo.getModelCustomizationId().equalsIgnoreCase(volumeGroupCustId)) {
- logger.debug("Found volume group for vfModule: " + volumeGroup.getVolumeGroupId());
+ logger.debug("Found volume group for vfModule: {}", volumeGroup.getVolumeGroupId());
return Optional.of(volumeGroup.getVolumeGroupId());
}
}
ExecutionServiceInput executionServiceInput = prepareExecutionServiceInput(executionObject);
execution.setVariable(EXEC_INPUT, executionServiceInput);
- logger.debug("Input payload: " + executionServiceInput.getPayload());
+ logger.debug("Input payload: {}", executionServiceInput.getPayload());
} catch (Exception ex) {
exceptionUtil.buildAndThrowWorkflowException(execution, 7000, ex);
/*
* private JsonObject setSliceProfileProperties(Map<String, Object> userParamsMap) { JsonObject
* sliceProfilePropertiesNotPresentInAai = new JsonObject();
- *
+ *
* if (userParamsMap != null) { userParamsMap.forEach((k, v) -> { if (!AAI_SUPPORTED_SLICE_PROFILE.contains((String)
* k)) { sliceProfilePropertiesNotPresentInAai.addProperty(k, v.toString()); } }); }
- *
+ *
* return sliceProfilePropertiesNotPresentInAai; }
*/
sliceProfileInstancesOptional.get().getServiceInstance().get(0).getServiceInstanceId();
}
} catch (Exception e) {
- LOGGER.error("Error in getting sliceProfile Instance" + e.getMessage());
+ LOGGER.error("Error in getting sliceProfile Instance {}", e.getMessage());
}
return sliceProfileInstanceId;
}
nssiName = sliceProfileInstancesOptional.get().getServiceInstanceName();
}
} catch (Exception e) {
- LOGGER.error("Error in getting Nssi Instance" + e.getMessage());
+ LOGGER.error("Error in getting Nssi Instance{}", e.getMessage());
}
return nssiName;
}
timeout = "PT30M";
}
}
- logger.debug("Async Callback Timeout will be: " + timeout);
+ logger.debug("Async Callback Timeout will be: {}", timeout);
String msg = (String) execution.getVariable("sdnrRequest");
- logger.debug("msg to be sent on dmaap " + msg);
+ logger.debug("msg to be sent on dmaap {}", msg);
sdnrDmaapPublisher.get().send(msg);
}
try {
lcmDmaapResponse = mapper.readValue(message, LcmDmaapResponse.class);
} catch (Exception e) {
- logger.warn("Invalid SDNC LCM DMaaP response: " + message);
+ logger.warn("Invalid SDNC LCM DMaaP response: {}", message);
continue;
}
ObjectMapper mapper = new ObjectMapper();
String msgString = mapper.writeValueAsString(msgObject);
- logger.debug(msgObject.getClass().getSimpleName() + "\n" + msgString);
+ logger.debug("{}\n{}", msgObject.getClass().getSimpleName(), msgString);
return msgString;
}
ObjectMapper mapper = new ObjectMapper();
String msgString = mapper.writeValueAsString(msgObject);
- logger.debug(msgObject.getClass().getSimpleName() + "\n" + msgString);
+ logger.debug("{}\n{}", msgObject.getClass().getSimpleName(), msgString);
return msgString;
}
String method =
getClass().getSimpleName() + ".buildWorkflowException(" + "execution=" + execution.getId() + ")";
- logger.debug("Entered " + method);
+ logger.debug("Entered {}", method);
String prefix = (String) execution.getVariable("prefix");
String processKey = getProcessKey(execution);
- logger.debug("processKey=" + processKey);
+ logger.debug("processKey={}", processKey);
// See if there"s already a WorkflowException object in the execution.
WorkflowException theException = (WorkflowException) execution.getVariable(WORKFLOWEXCEPTION);
if (theException != null) {
- logger.debug("Exited " + method + " - propagated " + theException);
+ logger.debug("Exited {} - propagated {}", method, theException);
return theException;
}
String errorResponse = trimString(execution.getVariable(prefix + "ErrorResponse"), null);
String responseCode = trimString(execution.getVariable(prefix + "ResponseCode"), null);
- logger.debug("errorResponse=" + errorResponse);
- logger.debug("responseCode=" + responseCode);
+ logger.debug("errorResponse={}", errorResponse);
+ logger.debug("responseCode={}", responseCode);
if (errorResponse != null || !isOneOf(responseCode, null, "0", "200", "201", "202", "204")) {
// This is an error condition. We need to return a WorkflowExcpetion
String xmlErrorCode = trimString(getXMLTextElement(maybeXML, "ErrorCode"), null);
if (xmlErrorMessage != null || xmlErrorCode != null) {
- logger.debug("xmlErrorMessage=" + xmlErrorMessage);
- logger.debug("xmlErrorCode=" + xmlErrorCode);
+ logger.debug("xmlErrorMessage={}", xmlErrorMessage);
+ logger.debug("xmlErrorCode={}", xmlErrorCode);
if (xmlErrorMessage == null) {
errorResponse = "Received error code " + xmlErrorCode + " from " + processKey;
theException = new WorkflowException(processKey, intResponseCode, errorResponse);
execution.setVariable(WORKFLOWEXCEPTION, theException);
- logger.debug("Exited " + method + " - created " + theException);
+ logger.debug("Exited {} - created {}", method, theException);
return theException;
}
- logger.debug("Exited " + method + " - no WorkflowException");
+ logger.debug("Exited {} - no WorkflowException", method);
return null;
}
public Object buildWorkflowResponse(DelegateExecution execution) {
String method = getClass().getSimpleName() + ".buildWorkflowResponse(" + "execution=" + execution.getId() + ")";
- logger.debug("Entered " + method);
+ logger.debug("Entered {}", method);
String prefix = (String) execution.getVariable("prefix");
String processKey = getProcessKey(execution);
}
}
- logger.debug("Exited " + method);
+ logger.debug("Exited {}", method);
return theResponse;
}
}
return out.toString();
} catch (Exception e) {
- logger.debug("Exception at readResourceFile stream: " + e);
+ logger.debug("Exception at readResourceFile stream: {}", e);
return null;
}
}
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
long startTime = System.currentTimeMillis();
- logger.debug(logMarker + " " + method + " received message: " + (message == null ? "" : System.lineSeparator())
- + message);
+ logger.debug("{} {} received message: {}{}", logMarker, method, (message == null ? "" : System.lineSeparator()),
+ message);
try {
Map<String, Object> variables = new HashMap<>();
* exceptions are handled differently from process execution exceptions. Correlation exceptions are thrown so the
* client knows something went wrong with the delivery of the message. Process execution exceptions are logged but
* not thrown.
- *
+ *
* @param messageEventName the message event name
* @param correlationVariable the process variable used as the correlator
* @param correlationValue the correlation value
protected boolean correlate(String messageEventName, String correlationVariable, String correlationValue,
Map<String, Object> variables, String logMarker) {
try {
- logger.debug(logMarker + " Attempting to find process waiting" + " for " + messageEventName + " with "
- + correlationVariable + " = '" + correlationValue + "'");
+ logger.debug("{} Attempting to find process waiting" + " for {} with {} = '{}'", logMarker,
+ messageEventName, correlationVariable, correlationValue);
execInfoList.add(new ExecInfo(execution));
}
- logger.debug(logMarker + " Found " + count + " process(es) waiting" + " for " + messageEventName + " with "
- + correlationVariable + " = '" + correlationValue + "': " + execInfoList);
+ logger.debug("{} Found {} process(es) waiting for {} with {} = '{}': {}", logMarker, count,
+ messageEventName, correlationVariable, correlationValue, execInfoList);
if (count == 0) {
if (queryFailCount > 0) {
- String msg =
- queryFailCount + "/" + queryCount + " execution queries failed attempting to correlate "
- + messageEventName + " with " + correlationVariable + " = '" + correlationValue
- + "'; last exception was:" + queryException;
- logger.debug(msg);
- logger.error(LoggingAnchor.FOUR, MessageEnum.BPMN_GENERAL_EXCEPTION.toString(), "BPMN",
- ErrorCode.UnknownError.getValue(), msg, queryException);
+ if (logger.isDebugEnabled() || logger.isErrorEnabled()) {
+ String msg =
+ queryFailCount + "/" + queryCount + " execution queries failed attempting to correlate "
+ + messageEventName + " with " + correlationVariable + " = '" + correlationValue
+ + "'; last exception was:" + queryException;
+ logger.debug(msg);
+ logger.error(LoggingAnchor.FOUR, MessageEnum.BPMN_GENERAL_EXCEPTION.toString(), "BPMN",
+ ErrorCode.UnknownError.getValue(), msg, queryException);
+ }
}
return false;
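The level check above is the usual SLF4J idiom for skipping expensive message construction when no
enabled level would use it; a minimal sketch, assuming the block writes at debug and error as it does
here (buildCorrelationFailureSummary is a hypothetical helper):

    if (logger.isDebugEnabled() || logger.isErrorEnabled()) {
        String msg = buildCorrelationFailureSummary(); // build the costly message only when needed
        logger.debug(msg);
        logger.error(msg, queryException);
    }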
// acknowledged the notification associated with request #1.
try {
- logger.debug(logMarker + " Running " + execInfoList.get(0) + " to receive " + messageEventName
- + " with " + correlationVariable + " = '" + correlationValue + "'");
+ logger.debug("{} Running {} to receive {} with {} = '{}'", logMarker, execInfoList.get(0),
+ messageEventName, correlationVariable, correlationValue);
@SuppressWarnings("unused")
MessageCorrelationResult result = runtimeService.createMessageCorrelation(messageEventName)
/**
* Records audit and metric events in the log for a callback success.
- *
+ *
* @param method the method name
* @param startTime the request start time
*/
/**
* Records error, audit and metric events in the log for a callback internal error.
- *
+ *
* @param method the method name
* @param startTime the request start time
* @param msg the error message
/**
* Records error, audit and metric events in the log for a callback internal error.
- *
+ *
* @param method the method name
* @param startTime the request start time
* @param msg the error message
String requestId = getRequestId(inputVariables);
long currentWaitTime = 0;
long waitTime = getWaitTime();
- logger.debug("WorkflowAsyncResource.waitForResponse using timeout: " + waitTime);
+ logger.debug("WorkflowAsyncResource.waitForResponse using timeout: {}", waitTime);
while (waitTime > currentWaitTime) {
Thread.sleep(workflowPollInterval);
currentWaitTime = currentWaitTime + workflowPollInterval;
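
The wait loop above is a plain poll-until-timeout pattern. A self-contained sketch of the same idea; the interval and timeout values and the isResponseReady() check are assumptions, not the real WorkflowAsyncResource internals:

class PollingSketch {
    private static final long WORKFLOW_POLL_INTERVAL_MS = 1000L; // assumed poll interval
    private static final long WAIT_TIME_MS = 60_000L;            // assumed overall timeout

    boolean waitForResponse() throws InterruptedException {
        long currentWaitTime = 0;
        while (WAIT_TIME_MS > currentWaitTime) {
            Thread.sleep(WORKFLOW_POLL_INTERVAL_MS);
            currentWaitTime += WORKFLOW_POLL_INTERVAL_MS;
            if (isResponseReady()) {   // hypothetical readiness check
                return true;           // response arrived inside the window
            }
        }
        return false;                  // timed out
    }

    private boolean isResponseReady() {
        return false; // placeholder for the real check against process history/variables
    }
}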
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/**
* Generalized REST interface that injects a message event into a waiting BPMN process. Examples:
- *
+ *
* <pre>
* /WorkflowMessage/SDNCAResponse/6d10d075-100c-42d0-9d84-a52432681cae-1478486185286
* /WorkflowMessage/SDNCAEvent/USOSTCDALTX0101UJZZ01
String method = "receiveWorkflowMessage";
- logger.debug(LOGMARKER + " Received workflow message" + " type='" + messageType + "'" + " correlator='"
- + correlator + "'" + (contentType == null ? "" : " contentType='" + contentType + "'") + " message="
- + System.lineSeparator() + message);
+ logger.debug("{} Received workflow message type='{}' correlator='{}'{} message={}{}", LOGMARKER, messageType,
+ correlator, (contentType == null ? "" : " contentType='" + contentType + "'"), System.lineSeparator(),
+ message);
if (messageType == null || messageType.isEmpty()) {
String msg = "Missing message type";
- logger.debug(LOGMARKER + " " + msg);
+ logger.debug("{} {}", LOGMARKER, msg);
logger.error(LoggingAnchor.FOUR, MessageEnum.BPMN_GENERAL_EXCEPTION.toString(), "BPMN",
ErrorCode.DataError.getValue(), LOGMARKER + ":" + msg);
return Response.status(400).entity(msg).build();
if (correlator == null || correlator.isEmpty()) {
String msg = "Missing correlator";
- logger.debug(LOGMARKER + " " + msg);
+ logger.debug("{} {}", LOGMARKER, msg);
logger.error(LoggingAnchor.FOUR, MessageEnum.BPMN_GENERAL_EXCEPTION.toString(), "BPMN",
ErrorCode.DataError.getValue(), LOGMARKER + ":" + msg);
return Response.status(400).entity(msg).build();
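
For callers of this endpoint, the javadoc's example URLs imply a POST of the message body to /WorkflowMessage/{messageType}/{correlator}. A hedged client sketch using the JDK HTTP client; the host, port, base path, and content type are placeholders, and the handler above rejects blank messageType or correlator with 400:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

class WorkflowMessageClientSketch {
    // Base URL is an assumption; the real endpoint depends on deployment.
    private static final String BASE = "http://so-bpmn-infra:8081/mso/WorkflowMessage";

    static int deliver(String messageType, String correlator, String body) throws Exception {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(BASE + "/" + messageType + "/" + correlator))
                .header("Content-Type", "application/json") // contentType is optional server-side
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
        HttpResponse<String> response =
                HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        return response.statusCode();
    }
}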
runtimeService.startProcessInstanceByKey(processKey, businessKey, inputVariables);
processInstanceId = processInstance.getId();
- logger.debug(logMarker + "Process " + processKey + ":" + processInstanceId + " "
- + (processInstance.isEnded() ? "ENDED" : "RUNNING"));
+ logger.debug("{}Process {}:{} {}", logMarker, processKey, processInstanceId,
+ (processInstance.isEnded() ? "ENDED" : "RUNNING"));
} catch (Exception e) {
WorkflowResponse workflowResponse = new WorkflowResponse();
workflowResponse.setResponse("Error occurred while executing the process: " + e);
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* mso-service-request-timeout then it waits for the value specified in DEFAULT_WAIT_TIME Note: value specified in
* mso-service-request-timeout is in seconds During polling time, if there is an exception encountered in the
* process execution then polling is stopped and the error response is returned to the client
- *
+ *
* @param processKey
* @param variableMap
* @return
long timeToWaitAfterProcessEnded = uriInfo == null ? 5000 : 60000;
AtomicLong timeProcessEnded = new AtomicLong(0);
boolean endedWithNoResponse = false;
- logger.debug(LOGMARKER + "WorkflowResource.startProcessInstanceByKey using timeout: " + waitTime);
+ logger.debug("{} WorkflowResource.startProcessInstanceByKey using timeout: {}", LOGMARKER, waitTime);
while (now <= endTime) {
Thread.sleep(pollingInterval);
workflowResponse.setMessageCode(500);
return Response.status(500).entity(workflowResponse).build();
} catch (Exception ex) {
- logger.debug(LOGMARKER + "Exception in startProcessInstance by key", ex);
+ logger.debug("{} Exception in startProcessInstance by key", LOGMARKER, ex);
workflowResponse.setMessage("Fail");
workflowResponse.setResponse("Error occurred while executing the process: " + ex.getMessage());
if (processInstance != null)
/**
* Returns the wait time, this is used by the resource on how long it should wait to send a response If none
* specified DEFAULT_WAIT_TIME is used
- *
+ *
* @param inputVariables
* @return
*/
/**
* Checks to see if the specified process is ended.
- *
+ *
* @param processInstanceId the process instance ID
* @return true if the process is ended
*/
try {
workflowResponse.setMessageCode(Integer.parseInt(responseCode));
} catch (NumberFormatException nex) {
- logger.debug(LOGMARKER + "Failed to parse ResponseCode: " + responseCode);
+ logger.debug("{} Failed to parse ResponseCode: {}", LOGMARKER, responseCode);
workflowResponse.setMessageCode(-1);
}
} else if ("Fail".equalsIgnoreCase(String.valueOf(status))) {
workflowResponse.setMessage("Fail");
} else {
- logger.debug(LOGMARKER + "Unrecognized Status: " + responseCode);
+ logger.debug("{} Unrecognized Status: {}", LOGMARKER, responseCode);
workflowResponse.setMessage("Fail");
}
}
* If an exception occurs when starting the process instance, it may be obtained by calling this method. Note
* that exceptions are only recorded while the process is executing in its original thread. Once a process is
* suspended, exception recording stops.
- *
+ *
* @return the exception, or null if none has occurred
*/
public Exception getException() {
/**
* Sets the process instance exception.
- *
+ *
* @param exception the exception
*/
private void setException(Exception exception) {
processInstance = runtimeService.startProcessInstanceByKey(processKey, inputVariables);
} catch (Exception e) {
- logger.debug(LOGMARKER + "ProcessThread caught an exception executing " + processKey + ": " + e);
+ logger.debug("{} ProcessThread caught an exception executing {}: {}", LOGMARKER, processKey, e);
setException(e);
}
}
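
Several conversions in this change pass an exception as the last logger argument. With SLF4J (logback backend, as commonly used here) a trailing Throwable is attached to the log event and its stack trace rendered by the backend; it is not used to fill a {} placeholder, so reserving one for it leaves a literal "{}" in the output. A small sketch of the two intended forms, assuming an ordinary SLF4J logger:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class ThrowableLoggingSketch {
    private static final Logger logger = LoggerFactory.getLogger(ThrowableLoggingSketch.class);

    void demo(String processKey, Exception e) {
        // Preferred: no placeholder for the exception; it is logged with its stack trace.
        logger.debug("ProcessThread caught an exception executing {}", processKey, e);

        // If only the exception's toString() is wanted, pass it explicitly as a String.
        logger.debug("ProcessThread caught an exception executing {}: {}", processKey, e.toString());
    }
}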
/**
* Attempts to get a response map from the specified process instance.
- *
+ *
* @return the response map, or null if it is unavailable
*/
private Map<String, Object> getResponseMap(ProcessInstance processInstance, String processKey,
/*
* RuntimeService runtimeService = getProcessEngineServices().getRuntimeService(); List<Execution> executions =
* runtimeService.createExecutionQuery() .processInstanceId(processInstanceId).list();
- *
+ *
* for (Execution execution : executions) {
- *
+ *
* @SuppressWarnings("unchecked") Map<String, Object> responseMap = (Map<String, Object>)
* getVariableFromExecution(runtimeService, execution.getId(), responseMapVariable);
- *
+ *
* if (responseMap != null) { msoLogger.debug(LOGMARKER + "Obtained " + responseMapVariable + " from process " +
* processInstanceId + " execution " + execution.getId()); return responseMap; } }
*/
processInstance.getId(), responseMapVariable);
if (responseMap != null) {
- logger.debug(LOGMARKER + "Obtained " + responseMapVariable + " from process " + processInstanceId
- + " history");
+ logger.debug("{}Obtained {} from process {} history", LOGMARKER, responseMapVariable,
+ processInstanceId);
return responseMap;
}
getVariableFromHistory(historyService, processInstanceId, "WorkflowResponse");
String workflowResponse =
workflowResponseObject == null ? null : String.valueOf(workflowResponseObject);
- logger.debug(LOGMARKER + "WorkflowResponse: " + workflowResponse);
+ logger.debug("{}WorkflowResponse: {}", LOGMARKER, workflowResponse);
if (workflowResponse != null) {
Object responseCodeObject =
getVariableFromHistory(historyService, processInstanceId, prefix + "ResponseCode");
String responseCode = responseCodeObject == null ? null : String.valueOf(responseCodeObject);
- logger.debug(LOGMARKER + prefix + "ResponseCode: " + responseCode);
+ logger.debug("{}{}ResponseCode: {}", LOGMARKER, prefix, responseCode);
responseMap = new HashMap<>();
responseMap.put("WorkflowResponse", workflowResponse);
responseMap.put("ResponseCode", responseCode);
}
}
- logger.debug(LOGMARKER + "WorkflowException: " + workflowExceptionText);
+ logger.debug("{}WorkflowException: {}", LOGMARKER, workflowExceptionText);
// BEGIN LEGACY SUPPORT. TODO: REMOVE THIS CODE
Object object = getVariableFromHistory(historyService, processInstanceId, processKey + "Response");
String response = object == null ? null : String.valueOf(object);
- logger.debug(LOGMARKER + processKey + "Response: " + response);
+ logger.debug("{}{}Response: {}", LOGMARKER, processKey, response);
if (response != null) {
object = getVariableFromHistory(historyService, processInstanceId, prefix + "ResponseCode");
String responseCode = object == null ? null : String.valueOf(object);
- logger.debug(LOGMARKER + prefix + "ResponseCode: " + responseCode);
+ logger.debug("{}{}ResponseCode: {}", LOGMARKER, prefix, responseCode);
responseMap = new HashMap<>();
responseMap.put("Response", response);
responseMap.put("ResponseCode", responseCode);
object = getVariableFromHistory(historyService, processInstanceId, prefix + "ErrorResponse");
String errorResponse = object == null ? null : String.valueOf(object);
- logger.debug(LOGMARKER + prefix + "ErrorResponse: " + errorResponse);
+ logger.debug("{}{}ErrorResponse: {}", LOGMARKER, prefix, errorResponse);
if (errorResponse != null) {
object = getVariableFromHistory(historyService, processInstanceId, prefix + "ResponseCode");
String responseCode = object == null ? null : String.valueOf(object);
- logger.debug(LOGMARKER + prefix + "ResponseCode: " + responseCode);
+ logger.debug("{}{}ResponseCode: {}", LOGMARKER, prefix, responseCode);
responseMap = new HashMap<>();
responseMap.put("Response", errorResponse);
responseMap.put("ResponseCode", responseCode);
/**
* Gets a variable value from the specified execution.
- *
+ *
* @return the variable value, or null if the variable could not be obtained
*/
private Object getVariableFromExecution(RuntimeService runtimeService, String executionId, String variableName) {
return runtimeService.getVariable(executionId, variableName);
} catch (ProcessEngineException e) {
// Most likely cause is that the execution no longer exists.
- logger.debug("Error retrieving execution " + executionId + " variable " + variableName + ": " + e);
+ logger.debug("Error retrieving execution {} variable {}: {}", executionId, variableName, e);
return null;
}
}
/**
* Gets a variable value from specified historical process instance.
- *
+ *
* @return the variable value, or null if the variable could not be obtained
*/
private Object getVariableFromHistory(HistoryService historyService, String processInstanceId,
variablesMap.put(variableInstance.getName(), variableInstance.getValue().toString());
}
- logger.debug(LOGMARKER + "***Received MSO getProcessVariables with processKey:" + processKey
- + " and variables: " + variablesMap.toString());
+ logger.debug("{}***Received MSO getProcessVariables with processKey:{} and variables: {}", LOGMARKER,
+ processKey, variablesMap.toString());
response.setVariables(variablesMap);
response.setMessage("Success");
response.setResponse("Successfully retrieved the variables");
response.setProcessInstanceID(processInstanceId);
- logger.debug(LOGMARKER + response.getMessage() + " for processKey: " + processKey + " with response: "
- + response.getResponse());
+ logger.debug("{}{} for processKey: {} with response: {}", LOGMARKER, response.getMessage(), processKey,
+ response.getResponse());
} catch (Exception ex) {
response.setMessage("Fail");
response.setResponse("Failed to retrieve the variables," + ex.getMessage());
*/
protected Object getProcessVariable(String processKey, String variable, long timeout) {
- logger.debug("Waiting " + timeout + "ms for " + processKey + "." + variable + " to be set");
+ logger.debug("Waiting {}ms for {}.{} to be set", timeout, processKey, variable);
long now = System.currentTimeMillis() + timeout;
long endTime = now + timeout;
while (value == null) {
if (now > endTime) {
if (processInstance == null) {
- logger.debug("Timed out waiting for " + processKey + " to start");
+ logger.debug("Timed out waiting for {} to start", processKey);
} else {
- logger.debug("Timed out waiting for " + processKey + "[" + processInstance.getId() + "]." + variable
- + " to be set");
+ logger.debug("Timed out waiting for {}[{}].{} to be set", processKey, processInstance.getId(),
+ variable);
}
return null;
now = System.currentTimeMillis();
}
- logger.debug(processKey + "[" + processInstance.getId() + "]." + variable + "=" + value);
+ logger.debug("{}[{}].{}={}", processKey, processInstance.getId(), variable, value);
return value;
}
logger.debug("Injecting SDNC adapter callback");
Response response = workflowMessageResource.deliver(contentType, "SDNCAResponse", sdncRequestId, content);
- logger.debug("Workflow response to SDNC adapter callback: " + response);
+ logger.debug("Workflow response to SDNC adapter callback: {}", response);
return true;
}
sdncAdapterCallbackRequest.setCallbackHeader(callbackHeader);
sdncAdapterCallbackRequest.setRequestData(content);
SDNCAdapterResponse sdncAdapterResponse = callbackService.sdncAdapterCallback(sdncAdapterCallbackRequest);
- logger.debug("Workflow response to SDNC adapter callback: " + sdncAdapterResponse);
+ logger.debug("Workflow response to SDNC adapter callback: {}", sdncAdapterResponse);
return true;
}
content = content.replace("((CORRELATOR))", correlator);
}
- logger.debug("Injecting " + messageType + " message");
+ logger.debug("Injecting {} message", messageType);
Response response = workflowMessageResource.deliver(contentType, messageType, correlator, content);
logger.debug("Workflow response to {} message: {}", messageType, response);
String activitySpecName = f.getName();
String errorMessage = deployActivitySpec(hostname, activitySpecName);
if (errorMessage == null) {
- logger.debug("Deployed Activity Spec: " + activitySpecName);
+ logger.debug("Deployed Activity Spec: {}", activitySpecName);
} else {
- logger.error("Error deploying Activity Spec: " + activitySpecName + " : " + errorMessage);
+ logger.error("Error deploying Activity Spec: {} : {}", activitySpecName, errorMessage);
}
}
} else {
return true;
}
String host = (String) tpInfoMap.get("host");
- logger.info("host string from tpinfo:" + host);
+ logger.info("host string from tpinfo:{}", host);
// host is empty means TP is in local, not empty means TP is in remote ONAP
if (!host.isEmpty()) {
return false;
tpInfoMap = tpJson;
// add resourceName
tpInfoMap.put("resourceName", vpnAttachmentResourceName);
- logger.info("*** we will try to find resourcename(" + vpnAttachmentResourceName
- + ") to add resource input ***");
+ logger.info("*** we will try to find resourcename({}) to add resource input ***",
+ vpnAttachmentResourceName);
break;
}
}
- logger.info("Get Terminal TP from InventoryOSS: " + tpInfoMap);
+ logger.info("Get Terminal TP from InventoryOSS: {}", tpInfoMap);
return tpInfoMap;
}
crossTps.put("local-access-ltp-id", localTPs.split(",")[0]);
crossTps.put("remote-access-ltp-id", remoteTPs.split(",")[0]);
}
- logger.info("cross TP info:" + crossTps);
+ logger.info("cross TP info:{}", crossTps);
}
@SuppressWarnings("unchecked")
}
msbPort = Integer.valueOf(strMsbPort);
- logger.info("AbstractSdncOperationTask.getGenericResourceApiClient msbIp = " + msbIp + " msbPort = " + msbPort);
+ logger.info("AbstractSdncOperationTask.getGenericResourceApiClient msbIp = {} msbPort = {}", msbIp, msbPort);
MSBServiceClient msbClient = new MSBServiceClient(msbIp, msbPort);
RestServiceCreater restServiceCreater = new RestServiceCreater(msbClient);
logger.info("AbstractSdncOperationTask.getGenericResourceApiClient end!");
private void checkWithActionName(ExecutionServiceInput executionServiceInput, String action, String msoRequestId) {
- logger.info("Checking the " + action + " request");
+ logger.info("Checking the {} request", action);
ActionIdentifiers actionIdentifiers = executionServiceInput.getActionIdentifiers();
/**
private void checkWithActionName(ExecutionServiceInput executionServiceInput, String action) {
- logger.info("Checking the " + action + " request");
+ logger.info("Checking the {} request", action);
ActionIdentifiers actionIdentifiers = executionServiceInput.getActionIdentifiers();
/**
private void checkWithActionName(ExecutionServiceInput executionServiceInput, String action) {
- logger.info("Checking the " + action + " request");
+ logger.info("Checking the {} request", action);
ActionIdentifiers actionIdentifiers = executionServiceInput.getActionIdentifiers();
/**
private void checkWithActionName(final ExecutionServiceInput executionServiceInput, final String action,
final String pnfName) {
- logger.info("Checking the " + action + " request");
+ logger.info("Checking the {} request", action);
ActionIdentifiers actionIdentifiers = executionServiceInput.getActionIdentifiers();
/**
// API Path
String apiPath = "/api/oof/v1/selection/nsst";
- LOGGER.debug("API path for NSST Selection: " + apiPath);
+ LOGGER.debug("API path for NSST Selection: {}", apiPath);
execution.setVariable("NSST_apiPath", apiPath);
// Setting correlator as requestId
profileInfo.put("areaTrafficCapDL", areaTrafficCapDL);
String oofRequest = oofUtils.buildSelectNSTRequest(requestId, messageType, profileInfo);
- LOGGER.debug("**** OOfRequest for NSST Selection: " + oofRequest);
+ LOGGER.debug("**** OOfRequest for NSST Selection: {}", oofRequest);
execution.setVariable("NSST_oofRequest", oofRequest);
}
LOGGER.debug(" **** Enter EnrichGBB ::: processOOFAsyncResponse ****");
String OOFResponse = (String) execution.getVariable("NSST_asyncCallbackResponse");
String requestStatus = jsonUtil.getJsonValue(OOFResponse, "requestStatus");
- LOGGER.debug("NSST OOFResponse is: " + OOFResponse);
+ LOGGER.debug("NSST OOFResponse is: {}", OOFResponse);
execution.setVariable("OOFResponse", OOFResponse);
String solutions = "";
if (requestStatus.equals("completed")) {
}
} else {
String statusMessage = jsonUtil.getJsonValue(OOFResponse, "statusMessage");
- LOGGER.error("received failed status from oof " + statusMessage);
- LOGGER.debug("received failed status from oof " + statusMessage);
+ LOGGER.error("received failed status from oof {}", statusMessage);
+ LOGGER.debug("received failed status from oof {}", statusMessage);
}
LOGGER.debug(">>>>>> solutions: {}", solutions);
if (!placementInfo.getPlacementDemands().isEmpty() || !licenseInfo.getLicenseDemands().isEmpty()) {
oofClient.postDemands(oofRequest);
} else {
- logger.debug(SERVICE_MISSING_DATA + " resources eligible for homing or licensing");
+ logger.debug("{} resources eligible for homing or licensing", SERVICE_MISSING_DATA);
throw new BpmnError(UNPROCESSABLE,
SERVICE_MISSING_DATA + " resources eligible for homing or licensing");
}
logger.trace("Completed Oof Homing Call Oof");
} catch (BpmnError e) {
- logger.debug(ERROR_WHILE_PREPARING_OOF_REQUEST + e.getStackTrace());
+ logger.debug("{}{}", ERROR_WHILE_PREPARING_OOF_REQUEST, e.getStackTrace());
exceptionUtil.buildAndThrowWorkflowException(execution, Integer.parseInt(e.getErrorCode()), e.getMessage());
} catch (BadResponseException e) {
- logger.debug(ERROR_WHILE_PREPARING_OOF_REQUEST + e.getStackTrace());
+ logger.debug("{}{}", ERROR_WHILE_PREPARING_OOF_REQUEST, e.getStackTrace());
exceptionUtil.buildAndThrowWorkflowException(execution, 400, e.getMessage());
} catch (Exception e) {
- logger.debug(ERROR_WHILE_PREPARING_OOF_REQUEST + e.getStackTrace());
+ logger.debug("{}{}", ERROR_WHILE_PREPARING_OOF_REQUEST, e.getStackTrace());
exceptionUtil.buildAndThrowWorkflowException(execution, INTERNAL, "Internal Error - occurred while "
+ "preparing oof request: " + e + " Stack:" + ExceptionUtils.getFullStackTrace(e));
}
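
Formatting e.getStackTrace() logs a StackTraceElement[] rather than a readable trace. Either hand the Throwable to the logger or render it to a String, as the existing ExceptionUtils.getFullStackTrace call above already does for the workflow exception message. A short sketch, assuming commons-lang 2.x and SLF4J:

import org.apache.commons.lang.exception.ExceptionUtils; // commons-lang 2.x, matching getFullStackTrace above
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class StackTraceLoggingSketch {
    private static final Logger logger = LoggerFactory.getLogger(StackTraceLoggingSketch.class);

    void onError(Exception e) {
        // Hand the Throwable to the logger so the backend renders the stack trace ...
        logger.debug("Error while preparing oof request", e);
        // ... or render it to a String explicitly when it must be embedded in a message body.
        logger.debug("Error while preparing oof request: {}", ExceptionUtils.getFullStackTrace(e));
    }
}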
*
*/
private LicenseDemand buildLicenseDemand(String id, ModelInfoMetadata metadata) {
- logger.debug("Building demand for service or resource: " + id);
+ logger.debug("Building demand for service or resource: {}", id);
LicenseDemand demand = new LicenseDemand();
if (isNotBlank(id) && isNotBlank(metadata.getModelInstanceName())) {
*
*/
private PlacementDemand buildDemand(String id, ModelInfoMetadata metadata) {
- logger.debug("Building demand for service or resource: " + id);
+ logger.debug("Building demand for service or resource: {}", id);
PlacementDemand placementDemand = new PlacementDemand();
if (isNotBlank(id) && isNotBlank(metadata.getModelInstanceName())) {
placementDemand.setServiceResourceId(id);
List<AllottedResource> allottes = serviceInstance.getAllottedResources();
List<GenericVnf> vnfs = serviceInstance.getVnfs();
- logger.debug("Processing placement solution " + i + 1);
+ logger.debug("Processing placement solution {}1", i);
for (int p = 0; p < placements.length(); p++) {
JSONObject placement = placements.getJSONObject(p);
SolutionInfo solutionInfo = new SolutionInfo();
}
}
} else {
- logger.debug(invalidMessage + IDENTIFIER_TYPE);
+ logger.debug("{}{}", invalidMessage, IDENTIFIER_TYPE);
throw new BpmnError(UNPROCESSABLE, invalidMessage + IDENTIFIER_TYPE);
}
} else if (type.equals("cloudRegionId")) {
solutionInfo.setTargetedCloudRegion(cloud);
si.setOrchestrationStatus(OrchestrationStatus.PRECREATED);
} else {
- logger.debug(invalidMessage + IDENTIFIER_TYPE);
+ logger.debug("{}{}", invalidMessage, IDENTIFIER_TYPE);
throw new BpmnError(UNPROCESSABLE, invalidMessage + IDENTIFIER_TYPE);
}
}
if (!placementDemands.isEmpty() || !licenseDemands.isEmpty()) {
client.postDemands(request);
} else {
- logger.debug(SERVICE_MISSING_DATA + "resources eligible for homing or licensing");
+ logger.debug("{}resources eligible for homing or licensing", SERVICE_MISSING_DATA);
throw new BpmnError(UNPROCESSABLE, SERVICE_MISSING_DATA + "resources eligible for homing or licensing");
}
*
*/
private Demand buildDemand(String id, ModelInfoMetadata metadata) {
- logger.debug("Building demand for service or resource: " + id);
+ logger.debug("Building demand for service or resource: {}", id);
Demand demand = new Demand();
if (isNotBlank(id) && isNotBlank(metadata.getModelInstanceName())) {
demand.setServiceResourceId(id);
List<GenericVnf> vnfs = serviceInstance.getVnfs();
List<ServiceProxy> serviceProxies = serviceInstance.getServiceProxies();
- logger.debug("Processing placement solution " + i + 1);
+ logger.debug("Processing placement solution {}1", i);
for (int p = 0; p < placements.length(); p++) {
JSONObject placement = placements.getJSONObject(p);
SolutionInfo solutionInfo = new SolutionInfo();
solutionInfo.setTargetedCloudRegion(cloud);
si.setOrchestrationStatus(OrchestrationStatus.PRECREATED);
} else {
- logger.debug(invalidMessage + IDENTIFIER_TYPE);
+ logger.debug("{}{}", invalidMessage, IDENTIFIER_TYPE);
throw new BpmnError(UNPROCESSABLE, invalidMessage + IDENTIFIER_TYPE);
}
si.setSolutionInfo(solutionInfo);
String vnfId = vnf.getVnfId();
inPserversLocked = aaiVnfResources.checkVnfPserversLockedFlag(vnfId);
} catch (Exception ex) {
- logger.warn("Exception on checking pservers: " + ex.getMessage());
+ logger.warn("Exception on checking pservers: {}", ex.getMessage());
}
if (inPserversLocked) {
exceptionUtil.buildAndThrowWorkflowException(execution, 7000, "VNF PServers in Locked in A&AI");
}
handlingCode = (String) variableMap.get(HANDLING_CODE);
- logger.debug("Handling code: " + handlingCode);
+ logger.debug("Handling code: {}", handlingCode);
execution.setVariable(WORKFLOW_EXCEPTION, workflowException);
} catch (Exception e) {
- logger.error("BPMN exception on activity execution: " + e.getMessage());
+ logger.error("BPMN exception on activity execution: {}", e.getMessage());
workflowException = new WorkflowException(EXECUTE_BUILDING_BLOCK, 7000, e.getMessage());
handlingCode = ABORT_HANDLING_CODE;
}
try {
getVserversForAppc(execution, vnf);
} catch (Exception e) {
- logger.warn("Unable to retrieve vservers for vnf: " + vnfId);
+ logger.warn("Unable to retrieve vservers for vnf: {}", vnfId);
}
}
break;
}
if (errorMessage != null) {
- logger.debug("verifyApplicationControllerTaskRequest() failed with " + errorMessage);
+ logger.debug("verifyApplicationControllerTaskRequest() failed with {}", errorMessage);
throw new ValidationException(errorMessage, false);
}
return;
logger.debug(String.format("requestId: %s, action: %s, pnfName: %s", requestId, requestAction, pnfName));
String requestPayload = String.valueOf(execution.getVariable(REQUEST_PAYLOAD));
- logger.debug("SO request payload: " + requestPayload);
+ logger.debug("SO request payload: {}", requestPayload);
String lcmAction;
String lcmPayload;
String blueprintName = genericVnf.getModelInfoGenericVnf().getBlueprintName();
String blueprintVersion = genericVnf.getModelInfoGenericVnf().getBlueprintVersion();
- logger.debug(" BlueprintName : " + blueprintName + " BlueprintVersion : " + blueprintVersion);
+ logger.debug(" BlueprintName : {} BlueprintVersion : {}", blueprintName, blueprintVersion);
AbstractCDSPropertiesBean abstractCDSPropertiesBean = new AbstractCDSPropertiesBean();
String blueprintName = vnf.getModelInfoGenericVnf().getBlueprintName();
String blueprintVersion = vnf.getModelInfoGenericVnf().getBlueprintVersion();
- logger.debug(" BlueprintName : " + blueprintName + " BlueprintVersion : " + blueprintVersion);
+ logger.debug(" BlueprintName : {} BlueprintVersion : {}", blueprintName, blueprintVersion);
AbstractCDSPropertiesBean abstractCDSPropertiesBean = new AbstractCDSPropertiesBean();
ErrorCode.UnknownError.getValue(), "APPC Error", e);
appcMessage = e.getMessage();
}
- logger.error("Error Message: " + appcMessage);
- logger.error("ERROR CODE: " + appcCode);
+ logger.error("Error Message: {}", appcMessage);
+ logger.error("ERROR CODE: {}", appcCode);
logger.trace("End of runAppCommand ");
if (appcCode != null && !("0").equals(appcCode)) {
exceptionUtil.buildAndThrowWorkflowException(execution, Integer.parseInt(appcCode), appcMessage);
}
}
- logger.error("Error Message: " + appcMessage);
- logger.error("ERROR CODE: " + appcCode);
+ logger.error("Error Message: {}", appcMessage);
+ logger.error("ERROR CODE: {}", appcCode);
if (appcCode != null && !("0").equals(appcCode)) {
exceptionUtil.buildAndThrowWorkflowException(execution, Integer.parseInt(appcCode), appcMessage);
}
try {
logger.debug("Setting fallout task variables:");
String taskId = task.getId();
- logger.debug("taskId is: " + taskId);
+ logger.debug("taskId is: {}", taskId);
String type = TASK_TYPE_FALLOUT;
BuildingBlockExecution gBuildingBlockExecution =
(BuildingBlockExecution) execution.getVariable(G_BUILDING_BLOCK_EXECUTION);
WorkflowException workflowException = (WorkflowException) execution.getVariable(WORKFLOW_EXCEPTION);
String nfRole = (String) execution.getVariable(RAINY_DAY_VNF_TYPE);
- logger.debug(TASK_VARIABLE_NFROLE + ": " + nfRole);
+ logger.debug("{}: {}", TASK_VARIABLE_NFROLE, nfRole);
String subscriptionServiceType = (String) execution.getVariable(RAINY_DAY_SERVICE_TYPE);
- logger.debug(TASK_VARIABLE_SUBSCRIPTION_SERVICE_TYPE + ": " + subscriptionServiceType);
+ logger.debug("{}: {}", TASK_VARIABLE_SUBSCRIPTION_SERVICE_TYPE, subscriptionServiceType);
String originalRequestId = (String) execution.getVariable(MSO_REQUEST_ID);
- logger.debug(TASK_VARIABLE_ORIGINAL_REQUEST_ID + ": " + originalRequestId);
+ logger.debug("{}: {}", TASK_VARIABLE_ORIGINAL_REQUEST_ID, originalRequestId);
String originalRequestorId =
gBuildingBlockExecution.getGeneralBuildingBlock().getRequestContext().getRequestorId();
- logger.debug(TASK_VARIABLE_ORIGINAL_REQUESTOR_ID + ": " + originalRequestorId);
+ logger.debug("{}: {}", TASK_VARIABLE_ORIGINAL_REQUESTOR_ID, originalRequestorId);
String description = "Manual user task to handle a failure of a BB execution";
- logger.debug(TASK_VARIABLE_DESCRIPTION + ": " + description);
+ logger.debug("{}: {}", TASK_VARIABLE_DESCRIPTION, description);
String taskTimeout = (String) gBuildingBlockExecution.getVariable(TASK_TIMEOUT);
String timeout = Date.from((new Date()).toInstant().plus(Duration.parse(taskTimeout))).toGMTString();
- logger.debug(TASK_VARIABLE_TIMEOUT + ": " + timeout);
+ logger.debug("{}: {}", TASK_VARIABLE_TIMEOUT, timeout);
String errorSource = ASTERISK;
if (workflowException != null && workflowException.getExtSystemErrorSource() != null) {
errorSource = workflowException.getExtSystemErrorSource().toString();
}
- logger.debug(TASK_VARIABLE_ERROR_SOURCE + ": " + errorSource);
+ logger.debug("{}: {}", TASK_VARIABLE_ERROR_SOURCE, errorSource);
String errorCode = ASTERISK;
if (workflowException != null) {
errorCode = workflowException.getErrorCode() + "";
}
- logger.debug(TASK_VARIABLE_ERROR_CODE + ": " + errorCode);
+ logger.debug("{}: {}", TASK_VARIABLE_ERROR_CODE, errorCode);
String errorMessage = ASTERISK;
if (workflowException != null) {
errorMessage = workflowException.getErrorMessage();
}
- logger.debug(TASK_VARIABLE_ERROR_MESSAGE + ": " + errorMessage);
+ logger.debug("{}: {}", TASK_VARIABLE_ERROR_MESSAGE, errorMessage);
String buildingBlockName = gBuildingBlockExecution.getFlowToBeCalled();
- logger.debug(TASK_VARIABLE_BUILDING_BLOCK_NAME + ": " + buildingBlockName);
+ logger.debug("{}: {}", TASK_VARIABLE_BUILDING_BLOCK_NAME, buildingBlockName);
String buildingBlockStep = ASTERISK;
if (workflowException != null) {
buildingBlockStep = workflowException.getWorkStep();
}
execution.setVariable(WORKSTEP, buildingBlockStep);
- logger.debug(TASK_VARIABLE_BUILDING_BLOCK_STEP + ": " + buildingBlockStep);
+ logger.debug("{}: {}", TASK_VARIABLE_BUILDING_BLOCK_STEP, buildingBlockStep);
String validResponses = this.environment.getProperty(validResponsesPath);
- logger.debug(TASK_VARIABLE_VALID_RESPONSES + ": " + validResponses);
+ logger.debug("{}: {}", TASK_VARIABLE_VALID_RESPONSES, validResponses);
Map<String, String> taskVariables = new HashMap<>();
taskVariables.put(TASK_VARIABLE_TYPE, type);
TaskService taskService = execution.getProcessEngineServices().getTaskService();
taskService.setVariablesLocal(taskId, taskVariables);
- logger.debug("successfully created fallout task: " + taskId);
+ logger.debug("successfully created fallout task: {}", taskId);
} catch (BpmnError e) {
- logger.debug(BPMN_EXCEPTION + e.getMessage());
+ logger.debug("{}{}", BPMN_EXCEPTION, e.getMessage());
throw e;
} catch (Exception ex) {
String msg = "Exception in setFalloutTaskVariables " + ex.getMessage();
try {
String taskId = task.getId();
- logger.debug("taskId is: " + taskId);
+ logger.debug("taskId is: {}", taskId);
String type = TASK_TYPE_PAUSE;
String nfRole = (String) execution.getVariable(VNF_TYPE);
TaskService taskService = execution.getProcessEngineServices().getTaskService();
taskService.setVariablesLocal(taskId, taskVariables);
- logger.debug("successfully created pause task: " + taskId);
+ logger.debug("successfully created pause task: {}", taskId);
} catch (BpmnError e) {
- logger.debug(BPMN_EXCEPTION + e.getMessage());
+ logger.debug("{}{}", BPMN_EXCEPTION, e.getMessage());
throw e;
} catch (Exception ex) {
String msg = "Exception in setPauseTaskVariables " + ex.getMessage();
try {
String taskId = task.getId();
- logger.debug("taskId is: " + taskId);
+ logger.debug("taskId is: {}", taskId);
TaskService taskService = execution.getProcessEngineServices().getTaskService();
Map<String, Object> taskVariables = taskService.getVariables(taskId);
String responseValue = (String) taskVariables.get(RESPONSE_VALUE);
- logger.debug("Received responseValue on completion: " + responseValue);
+ logger.debug("Received responseValue on completion: {}", responseValue);
// Have to set the first letter of the response to upper case
String responseValueUppercaseStart =
responseValue.substring(0, 1).toUpperCase() + responseValue.substring(1);
- logger.debug("ResponseValue to taskListener: " + responseValueUppercaseStart);
+ logger.debug("ResponseValue to taskListener: {}", responseValueUppercaseStart);
execution.setVariable(RESPONSE_VALUE_TASK, responseValueUppercaseStart);
} catch (BpmnError e) {
- logger.debug(BPMN_EXCEPTION + e.getMessage());
+ logger.debug("{}{}", BPMN_EXCEPTION, e.getMessage());
throw e;
} catch (Exception ex) {
String msg = "Exception in completeManualTask " + ex.getMessage();
}
}
} catch (SDNCErrorResponseException e) {
- logger.error("SDNC error response - " + e.getMessage());
+ logger.error("SDNC error response - {}", e.getMessage());
exceptionBuilder.buildAndThrowWorkflowException(execution, 7000, e.getMessage(), ONAPComponents.SDNC);
} catch (Exception e) {
logger.error("Error processing SDNC callback", e);
if (execution.hasVariable(ServiceLevelConstants.RESOURCE_TYPE)
&& execution.getVariable(ServiceLevelConstants.RESOURCE_TYPE) != null) {
final String controllerScope = (String) execution.getVariable(ServiceLevelConstants.RESOURCE_TYPE);
- LOG.debug("Scope retrieved from delegate execution: " + controllerScope);
+ LOG.debug("Scope retrieved from delegate execution: {}", controllerScope);
if (ServiceLevelConstants.VALID_CONTROLLER_SCOPE.contains(controllerScope)) {
final String wflName =
fetchWorkflowUsingScope(controllerScope, ServiceLevelConstants.HEALTH_CHECK_OPERATION);
.filter(data -> data.getRelationshipKey().contains("pnf.pnf-name"))
.map(x -> x.getRelationshipValue()).collect(Collectors.toList());
if (pnfNameList == null || pnfNameList.size() == 0) {
- logger.warn(
- "Unable to find the PNF for service instance id: " + serviceInstance.getServiceInstanceId());
+ logger.warn("Unable to find the PNF for service instance id: {}",
+ serviceInstance.getServiceInstanceId());
return;
}
delegateExecution.setVariable(ServiceLevelConstants.PNF_NAME_LIST, pnfNameList);
if (execution.hasVariable(ServiceLevelConstants.RESOURCE_TYPE)
&& execution.getVariable(ServiceLevelConstants.RESOURCE_TYPE) != null) {
final String controllerScope = (String) execution.getVariable(ServiceLevelConstants.RESOURCE_TYPE);
- LOG.debug("Scope retrieved from delegate execution: " + controllerScope);
+ LOG.debug("Scope retrieved from delegate execution: {}", controllerScope);
if (ServiceLevelConstants.VALID_CONTROLLER_SCOPE.contains(controllerScope)) {
final String wflName = fetchWorkflowUsingScope(controllerScope, ServiceLevelConstants.SW_UP_OPERATION);
LOG.debug("Software Upgrade workflow fetched for the scope: {} is: {}", controllerScope, wflName);
.append("flowName", orchFlow.getFlowName()).append("flowVersion", orchFlow.getFlowVersion())
.append("bpmnAction", orchFlow.getBpmnAction()).append("bpmnScope", orchFlow.getBpmnScope())
.toString();
- logger.info("Flow: " + flowDetails);
+ logger.info("Flow: {}", flowDetails);
buildExecuteBuildingBlockListPlan(orchFlow, plan, requestId, apiVersion, resourceId, requestAction, vnfType,
workflowResourceIds, requestDetails, replaceVnf);
}
public void changeCurrentGroup(ExecutionGroup group) {
if (currentGroup == null || !currentGroup.equals(group)) {
- logger.info("Change " + getName() + " group[" + group.getName() + "]");
+ logger.info("Change {} group[{}]", getName(), group.getName());
if (currentGroup != null)
currentGroup.flushBlocksFromCache(this.blocksBuiltCache);
}
String blocks =
blocksCache.stream().map(x -> x.getBuildingBlock().getBpmnFlowName() + ", ").reduce("", String::concat);
blocks = blocks.substring(0, blocks.length() - 2);
- logger.info("Push " + getName() + " (" + blocksCache.size() + ") blocks [" + blocks + "]");
+ logger.info("Push {} ({}) blocks [{}]", getName(), blocksCache.size(), blocks);
this.blocksBuiltCache.addAll(blocksCache);
}
String blocks = this.blocksBuiltCache.stream().map(x -> x.getBuildingBlock().getBpmnFlowName() + ", ")
.reduce("", String::concat);
blocks = blocks.substring(0, blocks.length() - 2);
- logger.info("Flush " + getName() + " (" + blocksBuiltCache.size() + ") blocks [" + blocks + "]");
+ logger.info("Flush {} ({}) blocks [{}]", getName(), blocksBuiltCache.size(), blocks);
blockList.addAll(this.blocksBuiltCache);
this.blocksBuiltCache.clear();
}
if (Boolean.TRUE.equals(execution.getVariable(IS_CHILD_PROCESS))) {
String parentCorrelationId = (String) execution.getVariable(PARENT_CORRELATION_ID);
- logger.info("Child service creation failed. Sending message to parent with correlationId: "
- + parentCorrelationId);
+ logger.info("Child service creation failed. Sending message to parent with correlationId: {}",
+ parentCorrelationId);
execution.getProcessEngineServices().getRuntimeService()
.createMessageCorrelation(CHILD_SVC_REQ_MESSAGE_NAME)
.setVariable(CHILD_SVC_REQ_STATUS, "FAILED").setVariable(CHILD_SVC_REQ_ERROR, childErrorMessage)
if (Boolean.TRUE.equals(execution.getVariable(IS_CHILD_PROCESS))) {
String parentCorrelationId = (String) execution.getVariable(PARENT_CORRELATION_ID);
- logger.info("Child service request completed. Sending message to parent process with correlationId: "
- + parentCorrelationId);
+ logger.info("Child service request completed. Sending message to parent process with correlationId: {}",
+ parentCorrelationId);
execution.getProcessEngineServices().getRuntimeService()
.createMessageCorrelation(CHILD_SVC_REQ_MESSAGE_NAME)
.setVariable(CHILD_SVC_REQ_STATUS, "COMPLETED").setVariable(CHILD_SVC_REQ_ERROR, "")
VnfResourceCustomization vrc = catalogDbClient
.getVnfResourceCustomizationByModelCustomizationUUID(resource.getModelCustomizationId());
if (vrc != null) {
- logger.debug("getSkipPostInstConf value: " + vrc.getSkipPostInstConf());
+ logger.debug("getSkipPostInstConf value: {}", vrc.getSkipPostInstConf());
boolean skipConfigVNF = vrc.getSkipPostInstConf();
currentSequenceSkipCheck(execution, skipConfigVNF);
}
VfModuleCustomization vfc = catalogDbClient
.getVfModuleCustomizationByModelCuztomizationUUID(resource.getModelCustomizationId());
if (null != vfc) {
- logger.debug("getSkipPostInstConf value: " + vfc.getSkipPostInstConf().booleanValue());
+ logger.debug("getSkipPostInstConf value: {}", vfc.getSkipPostInstConf().booleanValue());
boolean skipVfModule = vfc.getSkipPostInstConf();
currentSequenceSkipCheck(execution, skipVfModule);
}
.getPnfResourceCustomizationByModelCustomizationUUID(resource.getModelCustomizationId());
if (null != pnfResourceCustomization) {
- logger.debug("getSkipPostInstConf value: " + pnfResourceCustomization.getSkipPostInstConf());
+ logger.debug("getSkipPostInstConf value: {}", pnfResourceCustomization.getSkipPostInstConf());
boolean skipConfigPNF = pnfResourceCustomization.getSkipPostInstConf();
currentSequenceSkipCheck(execution, skipConfigPNF);
}
String networkTechnology = l3Network.getModelInfoNetwork().getNetworkTechnology();
if (networkTechnology == null) {
networkTechnology = l3Network.getNetworkTechnology();
- logger.warn("NetworkTechnology was null in CatalogDB. Using field from AAI: " + networkTechnology);
+ logger.warn("NetworkTechnology was null in CatalogDB. Using field from AAI: {}", networkTechnology);
}
if (networkTechnology != null) {
createNetworkRequest.setNetworkTechnology(networkTechnology.toUpperCase());
private void verifyRequestContentForAction(ExecutionServiceInput executionServiceInput) {
- logger.info("Checking the " + this.action + " request");
+ logger.info("Checking the {} request", this.action);
ActionIdentifiers actionIdentifiers = executionServiceInput.getActionIdentifiers();
assertThat(actionIdentifiers.getBlueprintName()).isEqualTo(TEST_PNF_RESOURCE_BLUEPRINT_NAME);
ActivateNESwPayload activateNESwPayload = SDNCLcmPayloadBuilder.buildActivateNESwPayload(execution);
try {
String payload = SDNCLcmPayloadBuilder.convertToSting(activateNESwPayload);
- logger.debug("ActivateNESwPayload:\n" + payload);
+ logger.debug("ActivateNESwPayload:\n{}", payload);
assertEquals(expectedPayload, payload);
} catch (JsonProcessingException e) {
DownloadNESwPayload downloadNESwPayload = SDNCLcmPayloadBuilder.buildDownloadNESwPayload(execution);
try {
String payload = SDNCLcmPayloadBuilder.convertToSting(downloadNESwPayload);
- logger.debug("DownloadNESwPayload:\n" + payload);
+ logger.debug("DownloadNESwPayload:\n{}", payload);
assertEquals(expectedPayload, payload);
} catch (JsonProcessingException e) {
UpgradePostCheckPayload upgradePostCheckPayload = SDNCLcmPayloadBuilder.buildUpgradePostCheckPayload(execution);
try {
String payload = SDNCLcmPayloadBuilder.convertToSting(upgradePostCheckPayload);
- logger.debug("UpgradePostCheckPayload:\n" + payload);
+ logger.debug("UpgradePostCheckPayload:\n{}", payload);
assertEquals(expectedPayload, payload);
} catch (JsonProcessingException e) {
UpgradePreCheckPayload upgradePreCheckPayload = SDNCLcmPayloadBuilder.buildUpgradePreCheckPayload(execution);
try {
String payload = SDNCLcmPayloadBuilder.convertToSting(upgradePreCheckPayload);
- logger.debug("UpgradePreCheckPayload:\n" + payload);
+ logger.debug("UpgradePreCheckPayload:\n{}", payload);
assertEquals(expectedPayload, payload);
} catch (JsonProcessingException e) {
if (isNotBlank(name) && isNotBlank(value)) {
headerMap.add("ALL", Pair.with(name, value));
} else {
- log.warn("Not adding " + name + " to headers.");
+ log.warn("Not adding {} to headers.", name);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
stopwatch.stop();
for (String message : itr) {
if (this.isAccepted(message)) {
- logger.info("accepted message found for " + this.getRequestId() + " on " + this.getTopic());
+ logger.info("accepted message found for {} on {}", this.getRequestId(), this.getTopic());
}
- logger.info("received dmaap message: " + message);
+ logger.info("received dmaap message: {}", message);
if (this.isFailure(message)) {
this.stopProcessingMessages();
final String errorMsg = "failure received from dmaap topic " + this.getTopic();
}
public void send(String json) {
- logger.info("publishing message to dmaap topic " + this.getTopic() + ": " + json);
+ logger.info("publishing message to dmaap topic {}: {}", this.getTopic(), json);
publisher.send(json);
// publisher.close(seconds, TimeUnit.SECONDS);
}
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
public List<String> get(String topic, String consumerGroup, String consumerId) {
- logger.info("consuming message from kafka topic : " + topic);
+ logger.info("consuming message from kafka topic: {}", topic);
this.properties.put("group.id", consumerGroup);
this.properties.put("client.id", consumerId);
if (consumer == null) {
msgs.add(rec.value());
}
logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<READING THE CONSUMED MESSAGES<<<<<<<<<<<<<<<<<<<<<<<<<<<");
- msgs.forEach(msg -> logger.info("MESSAGE CONSUMED FROM KAFKA : <<<<<" + msg + ">>>>>"));
+ msgs.forEach(msg -> logger.info("MESSAGE CONSUMED FROM KAFKA : <<<<<{}>>>>>", msg));
return msgs;
}
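
The consumer above sets group.id and client.id per call and drains the polled records into a list. A stand-alone sketch of the same consume-and-collect pattern with the plain Kafka client; the bootstrap servers, deserializer settings, and 5-second poll window are assumptions:

import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

class KafkaConsumeSketch {
    static List<String> get(String bootstrapServers, String topic, String consumerGroup, String consumerId) {
        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("group.id", consumerGroup);
        props.put("client.id", consumerId);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        List<String> msgs = new ArrayList<>();
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList(topic));
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
            for (ConsumerRecord<String, String> rec : records) {
                msgs.add(rec.value());
            }
        }
        return msgs;
    }
}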
private void setProperties(String bootstrapServers) throws Exception {
if (bootstrapServers == null) {
- logger.error("Environment Variable " + kafkaBootstrapServers + " is missing");
+ logger.error("Environment Variable {} is missing", kafkaBootstrapServers);
throw new Exception("Environment Variable " + kafkaBootstrapServers + " is missing");
} else {
this.properties.put("bootstrap.servers", bootstrapServers);
PolicyConfig[] policyConfigList = client.post(configReqParameters, PolicyConfig[].class);
PolicyConfig policyConfig = null;
if (policyConfigList.length > 1) {
- logger.debug("Too many configs for policyName: " + policyName);
+ logger.debug("Too many configs for policyName: {}", policyName);
return null;
}
try {
}
AAIErrorFormatter formatter = new AAIErrorFormatter(error);
String outputMessage = formatter.getMessage();
- logger.error("part of a bulk action failed in A&AI: " + entry.getValue());
+ logger.error("part of a bulk action failed in A&AI: {}", entry.getValue());
errorMessages.add(outputMessage);
}
}
giRC = client.createClient(clone.resourceVersion(resourceVersion));
giRC.delete();
} else {
- logger.warn(clone.build() + " already does not exist in " + client.getGraphDBName()
- + " therefore delete call not executed");
+ logger.warn("{} already does not exist in {} therefore delete call not executed", clone.build(),
+ client.getGraphDBName());
}
}
httpHeaders.set(httpHeaders.ACCEPT, headers.get(httpHeaders.ACCEPT).get(0));
httpHeaders.set(httpHeaders.CONTENT_TYPE, headers.get(httpHeaders.CONTENT_TYPE).get(0));
}).body(BodyInserters.fromObject(msgJson.toString())).retrieve().bodyToFlux(String.class);
- flux.subscribe(res -> logger.debug("Send Camunda Message: " + res));
+ flux.subscribe(res -> logger.debug("Send Camunda Message: {}", res));
}
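
The reactive call above subscribes with only a success consumer, so a failed POST is left to Reactor's default handling. Reactor's subscribe also accepts an error consumer; a small sketch over the same Flux<String>, with the class wrapper and logger added here for completeness:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reactor.core.publisher.Flux;

class CamundaMessageSendSketch {
    private static final Logger logger = LoggerFactory.getLogger(CamundaMessageSendSketch.class);

    static void subscribeWithErrorHandling(Flux<String> flux) {
        // Success and error consumers: a failed POST is logged rather than left unhandled.
        flux.subscribe(
                res -> logger.debug("Send Camunda Message: {}", res),
                err -> logger.error("Failed to send Camunda message", err));
    }
}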
protected RestTemplate getRestTemplate(boolean retry) {
ErrorNumbers.SVC_BAD_PARAMETER, null, version);
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_REQUEST_VALIDATION_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.SchemaError.getValue(), requestJSON, e);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity().toString());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity().toString());
return response;
}
ErrorNumbers.SVC_NO_SERVER_RESOURCES, null, version);
logger.error("", MessageEnum.APIH_BPEL_COMMUNICATE_ERROR, MSO_PROP_APIHANDLER_INFRA, "", "",
ErrorCode.AvailabilityError, "Exception while communicate with BPMN engine", e);
- logger.debug(END_OF_THE_TRANSACTION + resp.getEntity().toString());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, resp.getEntity().toString());
return resp;
}
return postRequest(workflowUrl, postParam, version);
Response response =
msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND, MsoException.ServiceException,
e.getMessage(), ErrorNumbers.NO_COMMUNICATION_TO_REQUESTS_DB, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_BPEL_COMMUNICATE_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.BusinessProcessError.getValue(),
"Null response from RequestDB when searching by serviceId");
- logger.debug(END_OF_THE_TRANSACTION + resp.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, resp.getEntity());
return resp;
}
ErrorNumbers.SVC_BAD_PARAMETER, null, version);
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_REQUEST_VALIDATION_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.SchemaError.getValue(), requestJSON, e);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
msoRequest.createErrorRequestRecord(Status.FAILED, requestId,
"Exception while communciate with " + "Catalog DB", action, ModelType.service.name(), requestJSON,
null, null);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
if (recipeLookupResult == null) {
msoRequest.createErrorRequestRecord(Status.FAILED, requestId, "Recipe does not exist in catalog DB", action,
ModelType.service.name(), requestJSON, null, null);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_BPEL_COMMUNICATE_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.AvailabilityError.getValue(),
"Exception while communicate with BPMN engine");
- logger.debug("End of the transaction, the final response is: " + resp.getEntity());
+ logger.debug("End of the transaction, the final response is: {}", resp.getEntity());
return resp;
}
return postRequest(recipeLookupResult.getOrchestrationURI(), postParam, version);
ErrorNumbers.SVC_BAD_PARAMETER, null, version);
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_REQUEST_VALIDATION_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.SchemaError.getValue(), requestJSON, e);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
msoRequest.createErrorRequestRecord(Status.FAILED, requestId,
"Exception while communciate with " + "Catalog DB", action, ModelType.service.name(), requestJSON,
null, null);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
if (recipeLookupResult == null) {
msoRequest.createErrorRequestRecord(Status.FAILED, requestId, "Recipe does not exist in catalog DB", action,
ModelType.service.name(), requestJSON, null, null);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
String bpmnRequest = null;
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_BPEL_COMMUNICATE_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.AvailabilityError.getValue(),
"Exception while communicate with BPMN engine");
- logger.debug("End of the transaction, the final response is: " + resp.getEntity());
+ logger.debug("End of the transaction, the final response is: {}", resp.getEntity());
return resp;
}
ErrorNumbers.SVC_BAD_PARAMETER, null, version);
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_REQUEST_VALIDATION_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.SchemaError.getValue(), requestJSON, e);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
}
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_REQUEST_VALIDATION_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.SchemaError.getValue(), requestJSON, e);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
MsoException.ServiceException, "No communication to catalog DB " + e.getMessage(),
ErrorNumbers.SVC_NO_SERVER_RESOURCES, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "Recipe does not exist in catalog DB",
ErrorNumbers.SVC_GENERAL_SERVICE_ERROR, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
ErrorNumbers.SVC_BAD_PARAMETER, null, version);
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_REQUEST_VALIDATION_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.SchemaError.getValue(), requestJSON, e);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
}
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_REQUEST_VALIDATION_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.SchemaError.getValue(), requestJSON, e);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "No communication to catalog DB " + e.getMessage(),
ErrorNumbers.SVC_NO_SERVER_RESOURCES, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "Recipe does not exist in catalog DB",
ErrorNumbers.SVC_GENERAL_SERVICE_ERROR, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
ErrorNumbers.SVC_BAD_PARAMETER, null, version);
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_REQUEST_VALIDATION_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.SchemaError.getValue(), requestJSON, e);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
msoRequest.createErrorRequestRecord(Status.FAILED, requestId,
"No communication to catalog DB " + e.getMessage(), action, ModelType.service.name(), requestJSON,
null, null);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
if (recipeLookupResult == null) {
ErrorNumbers.SVC_GENERAL_SERVICE_ERROR, null, version);
msoRequest.createErrorRequestRecord(Status.FAILED, requestId, "No recipe found in DB", action,
ModelType.service.name(), requestJSON, null, null);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
String bpmnRequest = null;
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_BPEL_COMMUNICATE_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.AvailabilityError.getValue(),
"Exception while creating bpmnRequest", e);
- logger.debug(END_OF_THE_TRANSACTION + resp.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, resp.getEntity());
return resp;
}
return postRequest(recipeLookupResult.getOrchestrationURI(), postParam, version);
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_BPEL_COMMUNICATE_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.AvailabilityError.getValue(),
"Exception while communicate with BPMN engine");
- logger.debug(END_OF_THE_TRANSACTION + resp.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, resp.getEntity());
return resp;
} catch (Exception e) {
Response resp = msoRequest.buildServiceErrorResponse(HttpStatus.SC_BAD_GATEWAY,
logger.error(LoggingAnchor.FOUR, MessageEnum.APIH_BPEL_COMMUNICATE_ERROR.toString(),
MSO_PROP_APIHANDLER_INFRA, ErrorCode.AvailabilityError.getValue(),
"Exception while communicate with BPMN engine");
- logger.debug(END_OF_THE_TRANSACTION + resp.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, resp.getEntity());
return resp;
}
return bpelStatusUpdate(response, version);
responseHandler.acceptedResponse(responseEntity);
CamundaResponse camundaResponse = responseHandler.getCamundaResponse(responseEntity);
String response = camundaResponse.getResponse();
- logger.debug(END_OF_THE_TRANSACTION + response);
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response);
return builder.buildResponse(HttpStatus.SC_ACCEPTED, null, response, apiVersion);
}
RecipeLookupResult recipeLookupResult = getServiceURI(serviceModelUUID, action);
if (recipeLookupResult != null) {
- logger.debug("Orchestration URI is: " + recipeLookupResult.getOrchestrationURI() + ", recipe Timeout is: "
- + Integer.toString(recipeLookupResult.getRecipeTimeout()));
+ logger.debug("Orchestration URI is: {}, recipe Timeout is: {}", recipeLookupResult.getOrchestrationURI(),
+ Integer.toString(recipeLookupResult.getRecipeTimeout()));
} else {
logger.debug("No matching recipe record found");
}
try {
response = mapper.writeValueAsString(mOIResponse);
} catch (Exception exception) {
- LOGGER.error("Error while creating MOIResponse JSON" + exception.getMessage());
+ LOGGER.error("Error while creating MOIResponse JSON{}", exception.getMessage());
}
return builder.buildResponse(HttpStatus.SC_OK, null, response, version);
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "No communication to catalog DB " + e.getMessage(),
ErrorNumbers.SVC_NO_SERVER_RESOURCES, null, version);
- LOGGER.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ LOGGER.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "Recipe does not exist in catalog DB",
ErrorNumbers.SVC_GENERAL_SERVICE_ERROR, null, version);
- LOGGER.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ LOGGER.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
RecipeLookupResult recipeLookupResult = getServiceURI(serviceModelUUID, action, defaultServiceModelName);
if (recipeLookupResult != null) {
- LOGGER.debug("Orchestration URI is: " + recipeLookupResult.getOrchestrationURI() + ", recipe Timeout is: "
- + Integer.toString(recipeLookupResult.getRecipeTimeout()));
+ LOGGER.debug("Orchestration URI is: {}, recipe Timeout is: {}", recipeLookupResult.getOrchestrationURI(),
+ recipeLookupResult.getRecipeTimeout());
} else {
LOGGER.debug("No matching recipe record found");
}
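With parameterized logging, the explicit Integer.toString(...) around the recipe timeout is redundant: the int is autoboxed and converted to text by the framework when it renders the placeholder. A minimal sketch (class and method names are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class RecipeTimeoutLoggingSketch {
        private static final Logger logger = LoggerFactory.getLogger(RecipeTimeoutLoggingSketch.class);

        void logRecipe(String orchestrationUri, int recipeTimeout) {
            // The boxed int is rendered by the framework; no manual conversion needed.
            logger.debug("Orchestration URI is: {}, recipe Timeout is: {}", orchestrationUri, recipeTimeout);
        }
    }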
String orchestrationURI, String requestScope) throws ApiException {
ResponseEntity<String> response =
requestHandlerUtils.postRequest(currentActiveReq, parameter, orchestrationURI);
- LOGGER.debug("BPEL response : " + response);
+ LOGGER.debug("BPEL response : {}", response);
int bpelStatus = responseHandler.setStatus(response.getStatusCodeValue());
String jsonResponse;
try {
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "No communication to catalog DB " + e.getMessage(),
ErrorNumbers.SVC_NO_SERVER_RESOURCES, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "Recipe does not exist in catalog DB",
ErrorNumbers.SVC_GENERAL_SERVICE_ERROR, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "No communication to catalog DB " + e.getMessage(),
ErrorNumbers.SVC_NO_SERVER_RESOURCES, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "Recipe does not exist in catalog DB",
ErrorNumbers.SVC_GENERAL_SERVICE_ERROR, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "No communication to catalog DB " + e.getMessage(),
ErrorNumbers.SVC_NO_SERVER_RESOURCES, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "Recipe does not exist in catalog DB",
ErrorNumbers.SVC_GENERAL_SERVICE_ERROR, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "No communication to catalog DB " + e.getMessage(),
ErrorNumbers.SVC_NO_SERVER_RESOURCES, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "Recipe does not exist in catalog DB",
ErrorNumbers.SVC_GENERAL_SERVICE_ERROR, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
RecipeLookupResult recipeLookupResult = getServiceURI(serviceModelUUID, action, defaultServiceModelName);
if (recipeLookupResult != null) {
- logger.debug("Orchestration URI is: " + recipeLookupResult.getOrchestrationURI() + ", recipe Timeout is: "
- + Integer.toString(recipeLookupResult.getRecipeTimeout()));
+ logger.debug("Orchestration URI is: {}, recipe Timeout is: {}", recipeLookupResult.getOrchestrationURI(),
+ recipeLookupResult.getRecipeTimeout());
} else {
logger.debug("No matching recipe record found");
}
String orchestrationURI, String requestScope) throws ApiException {
ResponseEntity<String> response =
requestHandlerUtils.postRequest(currentActiveReq, parameter, orchestrationURI);
- logger.debug("BPEL response : " + response);
+ logger.debug("BPEL response : {}", response);
int bpelStatus = responseHandler.setStatus(response.getStatusCodeValue());
String jsonResponse;
try {
try {
infraActiveRequest = requestsDbClient.getInfraActiveRequestbyRequestId(requestId);
} catch (HttpClientErrorException e) {
- logger.error("Error occurred while performing requestDb lookup by requestId: " + requestId, e);
+ logger.error("Error occurred while performing requestDb lookup by requestId: {}", requestId, e);
ErrorLoggerInfo errorLoggerInfo =
new ErrorLoggerInfo.Builder(MessageEnum.APIH_DB_ACCESS_EXC, ErrorCode.AvailabilityError).build();
throw new ValidateException.Builder("Exception while performing requestDb lookup by requestId",
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "No " + "communication to catalog DB " + e.getMessage(),
ErrorNumbers.SVC_NO_SERVER_RESOURCES, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
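The requestDb-lookup hunk above passes both a placeholder argument (requestId) and the caught exception. Since SLF4J 1.6, a trailing Throwable with no matching "{}" is treated as the exception to log, so the requestId and the full stack trace are captured by a single call. A minimal sketch (class name is illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class RequestDbLookupLoggingSketch {
        private static final Logger logger = LoggerFactory.getLogger(RequestDbLookupLoggingSketch.class);

        void onLookupFailure(String requestId, Exception e) {
            // One placeholder, two extra arguments: requestId fills {},
            // and the trailing exception is rendered as a stack trace.
            logger.error("Error occurred while performing requestDb lookup by requestId: {}", requestId, e);
        }
    }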
Response response = msoRequest.buildServiceErrorResponse(HttpStatus.SC_NOT_FOUND,
MsoException.ServiceException, "Recipe does " + "not exist in catalog DB",
ErrorNumbers.SVC_GENERAL_SERVICE_ERROR, null, version);
- logger.debug(END_OF_THE_TRANSACTION + response.getEntity());
+ logger.debug("{}{}", END_OF_THE_TRANSACTION, response.getEntity());
return response;
}
RecipeLookupResult recipeLookupResult = getServiceURI(serviceModelUUID, action, defaultServiceModelName);
if (recipeLookupResult != null) {
- logger.debug("Orchestration URI is: " + recipeLookupResult.getOrchestrationURI() + ", recipe Timeout is: "
- + Integer.toString(recipeLookupResult.getRecipeTimeout()));
+ logger.debug("Orchestration URI is: {}, recipe Timeout is: {}", recipeLookupResult.getOrchestrationURI(),
+ recipeLookupResult.getRecipeTimeout());
} else {
logger.debug("No matching recipe record found");
}
String orchestrationURI, String requestScope) throws ApiException {
ResponseEntity<String> response =
requestHandlerUtils.postRequest(currentActiveReq, parameter, orchestrationURI);
- logger.debug("BPEL response : " + response);
+ logger.debug("BPEL response : {}", response);
int bpelStatus = responseHandler.setStatus(response.getStatusCodeValue());
String jsonResponse;
try {
// 1. query workflow specifications for given vnfModelVersionId if need.
if (vnfModelVersionId != null) {
List<Workflow> vnfWorkflows = queryWorkflowSpecificationsForVnf(vnfModelVersionId);
- logger.debug("Retrieved " + vnfWorkflows.size() + " workflows for given vnfModelVersionId.");
+ logger.debug("Retrieved {} workflows for given vnfModelVersionId.", vnfWorkflows.size());
if (vnfWorkflows.size() > 0) {
workflows.addAll(vnfWorkflows);
}
// 2. query workflow specifications for given pnfModelVersionId if need.
if (pnfModelVersionId != null) {
List<Workflow> pnfWorkflows = queryWorkflowSpecificationsForPnf(pnfModelVersionId);
- logger.debug("Retrieved " + pnfWorkflows.size() + " workflows for given pnfModelVerionId.");
+ logger.debug("Retrieved {} workflows for given pnfModelVerionId.", pnfWorkflows.size());
if (pnfWorkflows.size() > 0) {
workflows.addAll(pnfWorkflows);
}
// 3. query workflow specifications for given resourceTarget
if (resourceTarget != null) {
List<Workflow> workflowsForResourceTarget = queryWorkflowsForResourceTarget(resourceTarget);
- logger.debug(
- "Retrieved " + workflowsForResourceTarget.size() + " workflows for given resource target.");
+ logger.debug("Retrieved {} workflows for given resource target.", workflowsForResourceTarget.size());
if (workflowsForResourceTarget.size() > 0) {
workflows.addAll(workflowsForResourceTarget);
}
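The three workflow hunks above share one shape: query by id when the id is present, log the count, and merge non-empty results. A minimal sketch of that shape as a generic helper, purely illustrative (the real code uses concrete Workflow query methods and tests "size() > 0" rather than isEmpty()):

    import java.util.List;
    import java.util.function.Function;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class WorkflowAggregationSketch {
        private static final Logger logger = LoggerFactory.getLogger(WorkflowAggregationSketch.class);

        <T> void addIfPresent(List<T> target, String modelVersionId, String label,
                Function<String, List<T>> query) {
            if (modelVersionId == null) {
                return;
            }
            List<T> found = query.apply(modelVersionId);
            logger.debug("Retrieved {} workflows for given {}.", found.size(), label);
            if (!found.isEmpty()) {
                target.addAll(found);
            }
        }
    }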
if (activitySpec != null) {
ActivitySequence activitySequence = new ActivitySequence();
activitySequence.setName(activitySpec.getName());
- logger.debug("Adding activity: " + activitySpec.getName());
+ logger.debug("Adding activity: {}", activitySpec.getName());
activitySequence.setDescription(activitySpec.getDescription());
activitySequences.add(activitySequence);
}
int ctr = 0;
int total = serviceEndpointRequestList.size();
for (ServiceEndPointRequest requestList : serviceEndpointRequestList) {
- logger.debug("Creating endpoint " + ++ctr + " of " + total + ": "
- + requestList.getServiceEndPoint().getName());
+ logger.debug("Creating endpoint {} of {}: {}", ++ctr, total,
+ requestList.getServiceEndPoint().getName());
getGrmClient().addServiceEndPoint(requestList);
}
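One caveat about the endpoint loop above: parameterized logging defers only the formatting, not the evaluation of the arguments, so "++ctr" still increments when DEBUG is disabled (harmless here, since the counter is only used in the message). A minimal sketch of the same loop shape with the increment kept outside the log call; the GRM client call is omitted and the element type is a stand-in:

    import java.util.List;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class EndpointLoopSketch {
        private static final Logger logger = LoggerFactory.getLogger(EndpointLoopSketch.class);

        void createEndpoints(List<String> endpointNames) {
            int ctr = 0;
            int total = endpointNames.size();
            for (String name : endpointNames) {
                ctr++; // side effect stays explicit, outside the log statement
                logger.debug("Creating endpoint {} of {}: {}", ctr, total, name);
                // the real code would call getGrmClient().addServiceEndPoint(...) here
            }
        }
    }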
public void postDemands(OofRequest homingRequest) throws BadResponseException, JsonProcessingException {
logger.trace("Started oof Client Post Demands");
String url = oofProperties.getHost() + oofProperties.getUri();
- logger.debug("Post demands url: " + url);
- logger.debug("Post demands payload: " + homingRequest.toJsonString());
+ logger.debug("Post demands url: {}", url);
+ logger.debug("Post demands payload: {}", homingRequest.toJsonString());
HttpHeaders header = new HttpHeaders();
header.setContentType(MediaType.APPLICATION_JSON);