<parent>
<groupId>org.onap.aai.aai-common</groupId>
<artifactId>aai-parent</artifactId>
- <version>1.15.5</version>
+ <version>1.16.0-SNAPSHOT</version>
</parent>
<groupId>org.onap.aai.graphadmin</groupId>
<artifactId>aai-graphadmin</artifactId>
- <version>1.15.6-SNAPSHOT</version>
+ <version>1.16.0-SNAPSHOT</version>
<properties>
+ <maven.compiler.release>17</maven.compiler.release>
<!-- Start of Compiler Related Properties -->
- <java.version>11</java.version>
- <maven.compiler.source>11</maven.compiler.source>
- <maven.compiler.target>11</maven.compiler.target>
+ <java.version>17</java.version>
+ <maven.compiler.source>17</maven.compiler.source>
+ <maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- End of Compiler Related Properties -->
<docker.push.registry>localhost:5000</docker.push.registry>
<aai.docker.version>1.0.0</aai.docker.version>
<aai.schema.service.version>1.12.7</aai.schema.service.version>
- <aai.common.version>1.15.5</aai.common.version>
+ <aai.common.version>1.16.0-SNAPSHOT</aai.common.version>
<aai.build.directory>${project.build.directory}/${project.artifactId}-${project.version}-build/
</aai.build.directory>
<aai.docker.namespace>onap</aai.docker.namespace>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
- <version>1.18.30</version>
+ <version>1.18.36</version>
<scope>provided</scope>
</dependency>
<dependency>
</exclusions>
</dependency>
<dependency>
- <groupId>javax.ws.rs</groupId>
- <artifactId>javax.ws.rs-api</artifactId>
- <version>2.1</version>
+ <groupId>jakarta.ws.rs</groupId>
+ <artifactId>jakarta.ws.rs-api</artifactId>
</dependency>
<dependency>
- <groupId>javax.xml.bind</groupId>
- <artifactId>jaxb-api</artifactId>
+ <groupId>jakarta.xml.bind</groupId>
+ <artifactId>jakarta.xml.bind-api</artifactId>
</dependency>
<!-- Start of Logback Dependencies -->
</dependency>
<!-- End of Tinkerpop Dependencies -->
<dependency>
- <groupId>com.fasterxml.jackson.jaxrs</groupId>
- <artifactId>jackson-jaxrs-json-provider</artifactId>
+ <groupId>com.fasterxml.jackson.jakarta.rs</groupId>
+ <artifactId>jackson-jakarta-rs-json-provider</artifactId>
</dependency>
<dependency>
<groupId>org.json</groupId>
with jersey 2 which we are using -->
<!-- Use this to make http requests instead of jersey 1.0 client -->
<dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
+ <groupId>org.apache.httpcomponents.client5</groupId>
+ <artifactId>httpclient5</artifactId>
</dependency>
<!-- Start of Spring Framework Dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
- <exclusions>
- <exclusion>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-tomcat</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-jetty</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
-FROM eclipse-temurin:11-jre-alpine
+FROM eclipse-temurin:17-jre-alpine
USER root
ENV SERVER_PORT=8449
EXPOSE ${SERVER_PORT}
JAVA_CMD="exec java";
JVM_OPTS="${JVM_OPTS} -XX:HeapDumpPath=/opt/app/aai-graphadmin/logs/ajsc-jetty/heap-dump";
-JVM_OPTS="${JVM_OPTS} -XX:MaxRAMPercentage=${MAX_RAM_PERCENTAGE:-60}";
+JVM_OPTS="${JVM_OPTS} -XX:MaxRAMPercentage=${MAX_RAM_PERCENTAGE:-70}";
JVM_OPTS="${JVM_OPTS} -XX:MaxMetaspaceSize=${MAX_METASPACE_SIZE}";
JVM_OPTS="${JVM_OPTS} -server";
JVM_OPTS="${JVM_OPTS} -XX:SurvivorRatio=8";
JVM_OPTS="${JVM_OPTS} -XX:+DisableExplicitGC";
JVM_OPTS="${JVM_OPTS} -XX:+UseG1GC";
-JVM_OPTS="${JVM_OPTS} -XX:+CMSParallelRemarkEnabled";
-JVM_OPTS="${JVM_OPTS} -XX:+CMSClassUnloadingEnabled";
-JVM_OPTS="${JVM_OPTS} -XX:-UseBiasedLocking";
JVM_OPTS="${JVM_OPTS} -XX:ParallelGCThreads=4";
JVM_OPTS="${JVM_OPTS} -XX:LargePageSizeInBytes=128m";
JVM_OPTS="${JVM_OPTS} -Dsun.net.inetaddr.ttl=180";
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
-import javax.annotation.PostConstruct;
-import javax.annotation.PreDestroy;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.PreDestroy;
@SpringBootApplication
// Scan the specific packages that has the beans/components
AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(ex);
ErrorLogHelper.logException(aai);
ErrorLogHelper.logError(aai.getCode(), ex.getMessage() + ", resolve and restart GraphAdmin");
- throw aai;
+ throw ex;
}
LOGGER.info(
"Application '{}' is running on {}!" ,
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import org.onap.aai.util.FormatDate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public static final String DEFAULT_OUTPUT_DIR = "logs/data/audit";
//DEBUG -- should be getting default-src-dir, default-output-dir and rdbms-db-name from param file
- @Autowired
public AuditGraphson2Sql( EdgeIngestor ei, SchemaVersions schemaVersions, LoaderFactory loaderFactory, ApertureService apertureService) {
this.schemaVersions = schemaVersions;
this.loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
throw new Exception(msg);
}
- final List<File> graphsons = Files.walk(Paths.get(graphsonDir))
+ final List<File> graphsons = Files.walk(Path.of(graphsonDir))
.filter(Files::isRegularFile)
.map(Path::toFile)
.sorted()
package org.onap.aai.config;
+import static org.springframework.security.web.util.matcher.AntPathRequestMatcher.antMatcher;
+
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
SecurityFilterChain filterChain(HttpSecurity httpSecurity) throws Exception{
httpSecurity.csrf(csrf -> csrf.disable())
.authorizeHttpRequests(requests -> requests
- .antMatchers("/util/echo", "/actuator/**")
+ .requestMatchers(antMatcher("/util/echo"), antMatcher("/actuator/**"))
.permitAll()
.anyRequest()
.authenticated())
import org.onap.aai.util.AAIConfig;
import org.onap.aai.util.AAIConstants;
import org.onap.logging.filter.base.ONAPComponents;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.PropertySource;
import org.springframework.scheduling.annotation.Scheduled;
private EdgeIngestor edgeIngestor;
private SchemaVersions schemaVersions;
- @Autowired
public DataExportTasks(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, SchemaVersions schemaVersions){
this.loaderFactory = loaderFactory;
this.edgeIngestor = edgeIngestor;
Vertex nullVtx = null;
Vertex preferredVtx = null;
- Long vidA = new Long(vtxA.id().toString());
- Long vidB = new Long(vtxB.id().toString());
+ Long vidA = Long.valueOf(vtxA.id().toString());
+ Long vidB = Long.valueOf(vtxB.id().toString());
String vtxANodeType = "";
String vtxBNodeType = "";
try(FileOutputStream subFileStr = new FileOutputStream(fname)) {
int okCount = 0;
int failCount = 0;
- Long debugDelayMsL = new Long(debugDelayMs);
+ Long debugDelayMsL = Long.valueOf(debugDelayMs);
GraphWriter graphWriter = null;
if ("gryo".equalsIgnoreCase(snapshotType)) {
vtx = ed.outVertex();
}
if (vtx == null) {
- retArr.add(String.format(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = %s <<< ", ed.id()));
+ retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = %s <<< ".formatted(ed.id()));
} else {
String nType = vtx.<String>property("aai-node-type").orElse(null);
if (displayAllVidsFlag) {
}
if (retVertList.size() == 0) {
- logger.debug(String.format("DEBUG No node found for nodeType = [%s], propsAndVal = %s", nodeType, propsAndValuesForMsg));
+ logger.debug("DEBUG No node found for nodeType = [%s], propsAndVal = %s".formatted(nodeType, propsAndValuesForMsg));
}
return retVertList;
}
if (retVertList.size() == 0) {
- logger.debug(String.format("DEBUG No node found for: [%s, with aai-created-ts > %d", qStringForMsg, windowStartTime));
+ logger.debug("DEBUG No node found for: [%s, with aai-created-ts > %d".formatted(qStringForMsg, windowStartTime));
}
return retVertList;
Vertex nullVtx = null;
Vertex preferredVtx = null;
- Long vidA = new Long(vtxA.id().toString());
- Long vidB = new Long(vtxB.id().toString());
+ Long vidA = Long.valueOf(vtxA.id().toString());
+ Long vidB = Long.valueOf(vtxB.id().toString());
String vtxANodeType = "";
String vtxBNodeType = "";
String origVid = origVtx.id().toString();
if (ob == null || ob.toString().equals("")) {
// It is missing its aai-uri
- eLogger.debug(String.format("DEBUG No [aai-uri] property found for vid = [%s] ", origVid));
+ eLogger.debug("DEBUG No [aai-uri] property found for vid = [%s] ".formatted(origVid));
return false;
}
else {
Vertex foundV = verts.next();
String foundVid = foundV.id().toString();
if( !origVid.equals(foundVid) ){
- eLogger.debug(String.format("DEBUG aai-uri key property [%s] for vid = [%s] brought back different vertex with vid = [%s].", aaiUriStr, origVid, foundVid));
+ eLogger.debug("DEBUG aai-uri key property [%s] for vid = [%s] brought back different vertex with vid = [%s].".formatted(aaiUriStr, origVid, foundVid));
return false;
}
}
if( count == 0 ){
- eLogger.debug(String.format("DEBUG aai-uri key property [%s] for vid = [%s] could not be used to query for that vertex. ", aaiUriStr, origVid));
+ eLogger.debug("DEBUG aai-uri key property [%s] for vid = [%s] could not be used to query for that vertex. ".formatted(aaiUriStr, origVid));
return false;
}
else if( count > 1 ){
- eLogger.debug(String.format("DEBUG aai-uri key property [%s] for vid = [%s] brought back multiple (%d) vertices instead of just one. ", aaiUriStr, origVid, count));
+ eLogger.debug("DEBUG aai-uri key property [%s] for vid = [%s] brought back multiple (%d) vertices instead of just one. ".formatted(aaiUriStr, origVid, count));
return false;
}
}
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
// FileLocation
Path pathDir = null;
try {
- pathDir = Paths.get(dirName);
+ pathDir = Path.of(dirName);
} catch (InvalidPathException i) {
String emsg = "Directory " + dirName + " could not be found.";
LOGGER.error(emsg);
if (unwrapAdjacencyList) {
final JsonNode root = mapper.readTree(inputStream);
final JsonNode vertices = root.get(GraphSONTokens.VERTICES);
- if (!vertices.getNodeType().equals(JsonNodeType.ARRAY)) throw new IOException(String.format("The '%s' key must be an array", GraphSONTokens.VERTICES));
+ if (!vertices.getNodeType().equals(JsonNodeType.ARRAY)) throw new IOException("The '%s' key must be an array".formatted(GraphSONTokens.VERTICES));
return IteratorUtils.stream(vertices.elements()).map(Object::toString);
} else {
final BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
logAndPrint("All vertex IDs from file " + filePath + ":\n" + allVertexIdsString);
} catch (IOException ioe) {
if(ioe instanceof FileNotFoundException) {
- logErrorAndPrint(String.format("File %s not found.", filePath));
+ logErrorAndPrint("File %s not found.".formatted(filePath));
} else {
logErrorAndPrint("ERROR reading in text file failed.", ioe);
}
import org.onap.aai.interceptors.AAIContainerFilter;
import org.onap.aai.logging.ErrorLogHelper;
-import javax.annotation.Priority;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerResponseContext;
-import javax.ws.rs.container.ContainerResponseFilter;
-import javax.ws.rs.core.MediaType;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerResponseContext;
+import jakarta.ws.rs.container.ContainerResponseFilter;
+import jakarta.ws.rs.core.MediaType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.onap.aai.interceptors.AAIContainerFilter;
import org.onap.aai.interceptors.AAIHeaderProperties;
-import javax.annotation.Priority;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerResponseContext;
-import javax.ws.rs.container.ContainerResponseFilter;
-import javax.ws.rs.core.MediaType;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerResponseContext;
+import jakarta.ws.rs.container.ContainerResponseFilter;
+import jakarta.ws.rs.core.MediaType;
import java.io.IOException;
@Priority(AAIResponseFilterPriority.HEADER_MANIPULATION)
import org.onap.aai.util.AAIConfig;
import org.springframework.beans.factory.annotation.Autowired;
-import javax.annotation.Priority;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerResponseContext;
-import javax.ws.rs.container.ContainerResponseFilter;
+import jakarta.annotation.Priority;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerResponseContext;
+import jakarta.ws.rs.container.ContainerResponseFilter;
import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
import org.onap.logging.filter.base.Constants;
import org.onap.logging.ref.slf4j.ONAPLogConstants;
-import javax.annotation.Priority;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.PreMatching;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.Provider;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.container.PreMatching;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.Provider;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.onap.aai.interceptors.AAIContainerFilter;
import org.onap.aai.interceptors.AAIHeaderProperties;
-import javax.annotation.Priority;
-import javax.ws.rs.HttpMethod;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.PreMatching;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.ext.Provider;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.HttpMethod;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.container.PreMatching;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.ext.Provider;
import java.io.IOException;
/**
import org.onap.aai.interceptors.AAIContainerFilter;
import org.onap.aai.interceptors.AAIHeaderProperties;
-import javax.annotation.Priority;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.PreMatching;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.ext.Provider;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.container.PreMatching;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.ext.Provider;
import java.util.Collections;
import java.util.regex.Matcher;
import org.onap.aai.interceptors.AAIContainerFilter;
-import javax.annotation.Priority;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.PreMatching;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.UriBuilder;
-import javax.ws.rs.ext.Provider;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.container.PreMatching;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.UriBuilder;
+import jakarta.ws.rs.ext.Provider;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.onap.aai.util.HbaseSaltPrefixer;
import org.springframework.beans.factory.annotation.Autowired;
-import javax.annotation.Priority;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.HttpMethod;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.PreMatching;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.ext.Provider;
+import jakarta.annotation.Priority;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.HttpMethod;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.container.PreMatching;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.ext.Provider;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.service.RetiredService;
import org.onap.aai.util.AAIConfig;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
-import javax.annotation.Priority;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.PreMatching;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.Provider;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.container.PreMatching;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.Provider;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
private String basePath;
- @Autowired
public RetiredInterceptor(RetiredService retiredService, @Value("${schema.uri.base.path}") String basePath){
this.retiredService = retiredService;
this.basePath = basePath;
import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.setup.SchemaVersion;
import org.onap.aai.setup.SchemaVersions;
-import org.springframework.beans.factory.annotation.Autowired;
-import javax.annotation.Priority;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.PreMatching;
-import javax.ws.rs.core.Response;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.container.PreMatching;
+import jakarta.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.Set;
import java.util.regex.Matcher;
private final SchemaVersions schemaVersions;
- @Autowired
public VersionInterceptor(SchemaVersions schemaVersions){
this.schemaVersions = schemaVersions;
allowedVersions = schemaVersions.getVersions()
import org.onap.aai.interceptors.AAIContainerFilter;
import org.onap.aai.setup.SchemaVersions;
-import org.springframework.beans.factory.annotation.Autowired;
-import javax.annotation.Priority;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.PreMatching;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.container.PreMatching;
import java.net.URI;
@PreMatching
private final SchemaVersions schemaVersions;
- @Autowired
public VersionLatestInterceptor(SchemaVersions schemaVersions){
this.schemaVersions = schemaVersions;
}
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.builder.FileBasedConfigurationBuilder;
import org.apache.commons.configuration2.builder.fluent.Parameters;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.onap.aai.datasnapshot.DataSnapshot;
import org.onap.aai.db.props.AAIProperties;
import org.onap.aai.dbmap.AAIGraph;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
dataSnapshot.executeCommand(dataSnapShotArgs, true, false, null, "MULTITHREAD_RELOAD", snapshotFile);
}
} catch (ConfigurationException e) {
- logAndPrint("ERROR: Could not load janusgraph configuration.\n" + ExceptionUtils.getFullStackTrace(e));
+ logAndPrint("ERROR: Could not load janusgraph configuration.\n" + ExceptionUtils.getStackTrace(e));
return;
}
}
SchemaVersions.class
).newInstance(engine, loaderFactory, edgeIngestor, edgeSerializer,schemaVersions);
} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
- logAndPrint("EXCEPTION caught initalizing migration class " + migratorClass.getSimpleName() + ".\n" + ExceptionUtils.getFullStackTrace(e));
+ logAndPrint("EXCEPTION caught initalizing migration class " + migratorClass.getSimpleName() + ".\n" + ExceptionUtils.getStackTrace(e));
engine.rollback();
continue;
}
logAndPrint("Saving snapshot of graph " + phase + " migration to " + fileName);
try {
- Path pathToFile = Paths.get(fileName);
+ Path pathToFile = Path.of(fileName);
if (!pathToFile.toFile().exists()) {
Files.createDirectories(pathToFile.getParent());
}
DataSnapshot dataSnapshot = new DataSnapshot();
dataSnapshot.executeCommand(dataSnapshotArgs, true, false, null, "THREADED_SNAPSHOT", null);
} catch (IOException e) {
- logAndPrint("ERROR: Could not write in memory graph to " + phase + "Migration file. \n" + ExceptionUtils.getFullStackTrace(e));
+ logAndPrint("ERROR: Could not write in memory graph to " + phase + "Migration file. \n" + ExceptionUtils.getStackTrace(e));
engine.rollback();
}
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Collections;
import java.util.Iterator;
if (dmaapMsgList.size() > 0) {
try {
- Files.write(Paths.get(logDirectory+"/"+fileName), (Iterable<String>)dmaapMsgList.stream()::iterator);
+ Files.write(Path.of(logDirectory+"/"+fileName), (Iterable<String>)dmaapMsgList.stream()::iterator);
} catch (IOException e) {
System.out.println("Unable to generate file with dmaap msgs for " + getMigrationName() +
" Exception is: " + e.getMessage());
f.mkdirs();
try {
- Files.createFile(Paths.get(logDirectory + "/" + fileName));
+ Files.createFile(Path.of(logDirectory + "/" + fileName));
} catch (Exception e) {
logger.error("Unable to create file", e);
}
try {
finalStr =
svIntr.getName() + "#@#" + svIntr.getURI() + "#@#" + str + "\n";
- Files.write(Paths.get(logDirectory + "/" + fileName),
+ Files.write(Path.of(logDirectory + "/" + fileName),
finalStr.getBytes(), StandardOpenOption.APPEND);
} catch (IOException e) {
System.out.println("Unable to generate file with dmaap msgs for " +
import java.util.HashMap;
import java.util.List;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ws.rs.core.Response.Status;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.onap.aai.exceptions.AAIException;
String propertyValue = v.property(property).value().toString();
if (propertyValue.isEmpty()) {
v.property(property, newValue);
- logAndPrint(logger,String.format("Node Type %s: Property %s is empty, adding value %s",
- nodeType, property, newValue.toString()));
+ logAndPrint(logger,"Node Type %s: Property %s is empty, adding value %s".formatted(
+ nodeType, property, newValue.toString()));
this.touchVertexProperties(v, false);
updateDmaapList(v);
this.migrationSuccess++;
this.subTotal++;
} else {
- logAndPrint(logger,String.format("Node Type %s: Property %s value already exists - skipping",
- nodeType, property));
+ logAndPrint(logger,"Node Type %s: Property %s value already exists - skipping".formatted(
+ nodeType, property));
}
} else {
logAndPrint(logger,String.format("Node Type %s: Property %s does not exist or " +
}
else if (vertices.size() == 1) {
try {
- logger.info(String.format("Updating %s with groupUuid %s from generic-vnf with vnfId %s with newResourceUuid %s", nodeType, groupUuid, vnfId, newResourceUuid));
+ logger.info("Updating %s with groupUuid %s from generic-vnf with vnfId %s with newResourceUuid %s".formatted(nodeType, groupUuid, vnfId, newResourceUuid));
Vertex v = vertices.get(0);
String resourceUuid = v.<String>property("resource-uuid").value();
logger.info("\tOriginal resource-uuid: " + resourceUuid);
import java.util.Optional;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.onap.aai.edges.EdgeIngestor;
import org.onap.aai.edges.enums.EdgeProperty;
public class ContainmentDeleteOtherVPropertyMigration extends Migrator {
private boolean success = true;
-
+
public ContainmentDeleteOtherVPropertyMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
}
-
+
//just for testing using test edge rule files
public ContainmentDeleteOtherVPropertyMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions, String edgeRulesFile) {
super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
}
-
+
@Override
public void run() {
try {
engine.asAdmin().getTraversalSource().E().sideEffect(t -> {
Edge e = t.get();
- logger.info("out vertex: " + e.outVertex().property("aai-node-type").value() +
+ logger.info("out vertex: " + e.outVertex().property("aai-node-type").value() +
" in vertex: " + e.inVertex().property("aai-node-type").value() +
" label : " + e.label());
if (e.property(EdgeProperty.CONTAINS.toString()).isPresent() &&
}
}).iterate();
} catch (Exception e) {
- logger.info("error encountered " + e.getClass() + " " + e.getMessage() + " " + ExceptionUtils.getFullStackTrace(e));
- logger.error("error encountered " + e.getClass() + " " + e.getMessage() + " " + ExceptionUtils.getFullStackTrace(e));
+ logger.info("error encountered " + e.getClass() + " " + e.getMessage() + " " + ExceptionUtils.getStackTrace(e));
+ logger.error("error encountered " + e.getClass() + " " + e.getMessage() + " " + ExceptionUtils.getStackTrace(e));
success = false;
}
-
+
}
@Override
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
logger.info("---------- Processing HUB Entries from file ----------");
try {
String line;
- List<String> lines = Files.readAllLines(Paths.get(fileName));
+ List<String> lines = Files.readAllLines(Path.of(fileName));
Iterator<String> lineItr = lines.iterator();
while (lineItr.hasNext()){
line = lineItr.next();
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
* @throws Exception
*/
protected Map<String,Set<String>> loadFile(String fileName) throws Exception {
- List<String> lines = Files.readAllLines(Paths.get(fileName));
+ List<String> lines = Files.readAllLines(Path.of(fileName));
return this.getFileContents(lines);
}
.map(line -> Arrays.stream(line.split(",", -1)).map(String::trim).collect(Collectors.toList()))
// .filter(this::verifyLine)
.map(this::processLine)
- .filter(Optional::isPresent)
- .map(Optional::get)
+ .flatMap(Optional::stream)
.forEach(p -> {
processedRowsCount.getAndIncrement();
String pnfName = p.getValue0();
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
logger.info(fileName);
logger.info("---------- Processing PATH Entries from file ----------");
try {
- List<String> lines = Files.readAllLines(Paths.get(fileName));
+ List<String> lines = Files.readAllLines(Path.of(fileName));
Iterator<String> lineItr = lines.iterator();
while (lineItr.hasNext()){
String line = lineItr.next().replace("\n", "").replace("\r", "");
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
try {
String line;
- List<String> lines = Files.readAllLines(Paths.get(fileName));
+ List<String> lines = Files.readAllLines(Path.of(fileName));
Iterator<String> lineItr = lines.iterator();
while (lineItr.hasNext()){
line = lineItr.next().replace("\n", "").replace("\r", "");
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
-import javax.ws.rs.core.UriBuilder;
+import jakarta.ws.rs.core.UriBuilder;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
logger.info(fileName);
logger.info("---------- Processing Entries from file ----------");
try {
- List<String> lines = Files.readAllLines(Paths.get(fileName));
+ List<String> lines = Files.readAllLines(Path.of(fileName));
Iterator<String> lineItr = lines.iterator();
while (lineItr.hasNext()){
String line = lineItr.next().replace("\n", "").replace("\r", "");
currentValueModelVersionID = getModelVersionIdNodeValue(vertex);
currrentValueModelInvariantID = getModelInvariantIdNodeValue(vertex);
- String uri = String.format("/service-design-and-creation/models/model/%s/model-vers/model-ver/%s", currrentValueModelInvariantID, currentValueModelVersionID);
+ String uri = "/service-design-and-creation/models/model/%s/model-vers/model-ver/%s".formatted(currrentValueModelInvariantID, currentValueModelVersionID);
String propertyKey = NODETYPEKEYMAP.get(nodeTypeString);
String propertyValue = vertex != null ? vertex.value(propertyKey).toString() : "";
logger.info("Processing "+nodeTypeString+ " vertex with key "+ propertyValue);
import org.onap.aai.setup.SchemaVersions;
import org.springframework.web.util.UriUtils;
-import javax.ws.rs.core.UriBuilder;
+import jakarta.ws.rs.core.UriBuilder;
import java.io.UnsupportedEncodingException;
import java.util.*;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
try {
- List<String> lines = Files.readAllLines(Paths.get(fileName));
+ List<String> lines = Files.readAllLines(Path.of(fileName));
Iterator<String> lineItr = lines.iterator();
while (lineItr.hasNext()){
String line = lineItr.next().trim();
forwarderEvcId = forwarderEvcVtx.property("forwarder-evc-id").value().toString();
try{
forwarderEvcVtx.property("ivlan", ivlanValue);
- logger.info(String.format("Updating Node Type forwarder-evc Property ivlan value %s", ivlanValue.toString()));
+ logger.info("Updating Node Type forwarder-evc Property ivlan value %s".formatted(ivlanValue.toString()));
this.touchVertexProperties(forwarderEvcVtx, false);
updateDmaapList(forwarderEvcVtx);
migrationSuccess++;
import org.onap.aai.serialization.engines.TransactionalGraphEngine;
import org.springframework.web.util.UriUtils;
-import javax.ws.rs.core.UriBuilder;
+import jakarta.ws.rs.core.UriBuilder;
import java.io.UnsupportedEncodingException;
import java.util.*;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
* @throws Exception
*/
protected ArrayList loadFile(String fileName) throws Exception {
- List<String> lines = Files.readAllLines(Paths.get(fileName));
+ List<String> lines = Files.readAllLines(Path.of(fileName));
return this.getFileContents(lines);
}
logger.info("Total rows count excluding header: " + lines.size());
lines.stream().filter(line -> !line.isEmpty()).map(line -> Arrays.stream(line.split(",", -1)).map(String::trim).collect(Collectors.toList()))
- .map(this::processRegionUpgradeCycle).filter(Optional::isPresent).map(Optional::get).forEach(p -> {
+ .map(this::processRegionUpgradeCycle).flatMap(Optional::stream).forEach(p -> {
processedRowsCount.getAndIncrement();
String pnfName = p.getValue0();
if (!regionMap.containsKey(pnfName)) {
fileContent.add(regionMap);
lines.stream().filter(line -> !line.isEmpty()).map(line -> Arrays.stream(line.split(",", -1)).map(String::trim).collect(Collectors.toList()))
- .map(this::processRegionAlias).filter(Optional::isPresent).map(Optional::get).forEach(p -> {
+ .map(this::processRegionAlias).flatMap(Optional::stream).forEach(p -> {
processedRowsCount.getAndIncrement();
String pnfName = p.getValue0();
if (!aliasMap.containsKey(pnfName)) {
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
logger.info("---------- Reading all file types and vf-modules ----------");
ArrayList<String> fileTypeList = new ArrayList<String>();
try {
- List<String> lines = Files.readAllLines(Paths.get(fileName));
+ List<String> lines = Files.readAllLines(Path.of(fileName));
Iterator<String> lineItr = lines.iterator();
int typeFileLineCounter = 0;
while (lineItr.hasNext()){
logger.info(fileName);
logger.info("---------- Processing Entries from file ----------");
try {
- List<String> lines = Files.readAllLines(Paths.get(fileName));
+ List<String> lines = Files.readAllLines(Path.of(fileName));
Iterator<String> lineItr = lines.iterator();
while (lineItr.hasNext()){
String line = lineItr.next().replace("\n", "").replace("\r", "");
fileName = feedDir+ "INPUT-VNF.csv";
logger.info(fileName);
logger.info("---------- Processing Entries from file ----------");
- lines = Files.readAllLines(Paths.get(fileName));
+ lines = Files.readAllLines(Path.of(fileName));
lineItr = lines.iterator();
while (lineItr.hasNext()){
String line = lineItr.next().replace("\n", "").replace("\r", "");
import org.onap.aai.util.AAIConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
-import javax.ws.rs.*;
-import javax.ws.rs.core.*;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ws.rs.*;
+import jakarta.ws.rs.core.*;
+import jakarta.ws.rs.core.Response.Status;
@Component
@Path("{version: v1}/audit-sql-db")
private String rdbmsDbName;
private AuditGraphson2Sql auditGraphson2Sql;
- @Autowired
public AuditSqlDbConsumer(
AuditGraphson2Sql auditGraphson2Sql,
@Value("${aperture.rdbmsname}") String rdbmsDbName
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.logging.ErrorLogHelper;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.WebApplicationException;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
import java.util.ArrayList;
import java.util.List;
//the general case is that cxf will give us a WebApplicationException
//with a linked exception
- if (exception instanceof WebApplicationException) {
- WebApplicationException e = (WebApplicationException) exception;
+ if (exception instanceof WebApplicationException e) {
if (e.getCause() != null) {
if (e.getCause() instanceof SAXParseException2) {
templateVars.add("UnmarshalException");
if (response == null) {
Exception actual_e = exception;
- if (exception instanceof WebApplicationException) {
- WebApplicationException e = (WebApplicationException) exception;
+ if (exception instanceof WebApplicationException e) {
response = e.getResponse();
} else {
templateVars.add(request.getMethod());
import org.onap.aai.util.AAIConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.*;
-import javax.ws.rs.core.*;
-import javax.ws.rs.core.Response.Status;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.*;
+import jakarta.ws.rs.core.*;
+import jakarta.ws.rs.core.Response.Status;
import java.util.List;
@Component
private String basePath;
- @Autowired
public QueryConsumer(
@Qualifier("traversalUriHttpEntry") HttpEntry traversalUriHttpEntry,
DslQueryProcessor dslQueryProcessor,
*/
package org.onap.aai.rest;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.MediaType;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.service.SchemaJobStatusService;
import com.google.gson.JsonObject;
-import org.apache.http.conn.ConnectTimeoutException;
+import org.apache.hc.client5.http.ConnectTimeoutException;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.restclient.RestClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpMethod;
private final String appName;
- @Autowired
public ApertureService(
@Qualifier("apertureRestClient") RestClient apertureRestClient,
@Value("${spring.application.name}") String appName
protected HttpComponentsClientHttpRequestFactory getHttpRequestFactory() throws Exception {
HttpComponentsClientHttpRequestFactory requestFactory = super.getHttpRequestFactory();
requestFactory.setConnectionRequestTimeout(timeout);
+ // Manual migration to `SocketConfig.Builder.setSoTimeout(Timeout)` necessary; see: https://docs.spring.io/spring-framework/docs/6.0.0/javadoc-api/org/springframework/http/client/HttpComponentsClientHttpRequestFactory.html#setReadTimeout(int)
requestFactory.setReadTimeout(timeout);
requestFactory.setConnectTimeout(timeout);
return requestFactory;
protected HttpComponentsClientHttpRequestFactory getHttpRequestFactory() throws Exception {
HttpComponentsClientHttpRequestFactory requestFactory = super.getHttpRequestFactory();
requestFactory.setConnectionRequestTimeout(timeout);
+ // Manual migration to `SocketConfig.Builder.setSoTimeout(Timeout)` necessary; see: https://docs.spring.io/spring-framework/docs/6.0.0/javadoc-api/org/springframework/http/client/HttpComponentsClientHttpRequestFactory.html#setReadTimeout(int)
requestFactory.setReadTimeout(timeout);
requestFactory.setConnectTimeout(timeout);
return requestFactory;
protected HttpComponentsClientHttpRequestFactory getHttpRequestFactory() throws Exception {
HttpComponentsClientHttpRequestFactory requestFactory = super.getHttpRequestFactory();
requestFactory.setConnectionRequestTimeout(timeout);
+ // Manual migration to `SocketConfig.Builder.setSoTimeout(Timeout)` necessary; see: https://docs.spring.io/spring-framework/docs/6.0.0/javadoc-api/org/springframework/http/client/HttpComponentsClientHttpRequestFactory.html#setReadTimeout(int)
requestFactory.setReadTimeout(timeout);
requestFactory.setConnectTimeout(timeout);
return requestFactory;
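
Editorial note on the three request-factory hunks above: under Spring Framework 6 with Apache HttpClient 5, the read timeout set via HttpComponentsClientHttpRequestFactory.setReadTimeout(int) is flagged for manual migration onto the underlying client's SocketConfig, which is what the inserted comments record. A minimal sketch of what that follow-up migration could look like, assuming the factory is built around a custom CloseableHttpClient; the helper class name and the timeoutMs parameter are illustrative and not part of this patch:

import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder;
import org.apache.hc.core5.http.io.SocketConfig;
import org.apache.hc.core5.util.Timeout;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;

// Hypothetical helper, not part of this patch: applies the read (socket) timeout
// through SocketConfig on the connection manager instead of setReadTimeout(int).
class TimeoutAwareRequestFactoryBuilder {

    static HttpComponentsClientHttpRequestFactory build(int timeoutMs) {
        SocketConfig socketConfig = SocketConfig.custom()
                .setSoTimeout(Timeout.ofMilliseconds(timeoutMs)) // replaces requestFactory.setReadTimeout(timeoutMs)
                .build();
        CloseableHttpClient client = HttpClients.custom()
                .setConnectionManager(PoolingHttpClientConnectionManagerBuilder.create()
                        .setDefaultSocketConfig(socketConfig)
                        .build())
                .build();
        HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory(client);
        // Connect and connection-request timeouts remain settable on the factory itself.
        requestFactory.setConnectTimeout(timeoutMs);
        requestFactory.setConnectionRequestTimeout(timeoutMs);
        return requestFactory;
    }
}

The three hunks in the patch only add the reminder comment and leave the methods functionally unchanged; the sketch above is one way the follow-up could be implemented.
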
import org.onap.aai.edges.EdgeRuleQuery;
import org.onap.aai.edges.exceptions.AmbiguousRuleChoiceException;
import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException;
-import org.springframework.beans.factory.annotation.Autowired;
import java.util.HashMap;
import java.util.List;
/**
* Instantiates a new DslListener.
*/
- @Autowired
public DslListener(EdgeIngestor edgeIngestor) {
this.edgeRules = edgeIngestor;
}
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.onap.aai.AAIDslLexer;
import org.onap.aai.AAIDslParser;
-import org.springframework.beans.factory.annotation.Autowired;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
private DslListener dslListener;
- @Autowired
public DslQueryProcessor(DslListener dslListener){
this.dslListener = dslListener;
}
import org.onap.aai.serialization.engines.TransactionalGraphEngine;
import org.onap.aai.serialization.queryformats.SubGraphStyle;
-import javax.ws.rs.core.MultivaluedHashMap;
-import javax.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
import java.io.FileNotFoundException;
import java.net.URI;
import java.util.*;
import java.util.ArrayList;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.restcore.RESTAPI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
private static final String UP_RESPONSE="{\"status\":\"UP\",\"groups\":[\"liveness\",\"readiness\"]}";
- @Autowired
public EchoResponse(AaiGraphChecker aaiGraphChecker ) {
this.aaiGraphChecker = aaiGraphChecker;
}
private static void setSchemaInitializedToFalse(JanusGraph graph, Optional<Vertex> schemaInitializedVertex) {
Vertex vertex = schemaInitializedVertex.get();
Object schemaInitializedValueObj = vertex.property(SCHEMA_INITIALIZED).value();
- Boolean schemaInitializedValue = schemaInitializedValueObj instanceof Boolean ? (Boolean) schemaInitializedValueObj : Boolean.FALSE;
+ Boolean schemaInitializedValue = schemaInitializedValueObj instanceof Boolean b ? b : Boolean.FALSE;
//Setting schema-initialized vertex to False
if (Boolean.TRUE.equals(schemaInitializedValue)) {
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Service;
-import javax.annotation.PostConstruct;
+import jakarta.annotation.PostConstruct;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
aai = new AAIException("AAI_3025","Error parsing exception - Please Investigate" +
LogFormatTools.getStackTop(ex));
} else {
- LOGGER.info(String.format("Exception is %sRoot cause is%s", ExceptionUtils.getRootCause(ex).getMessage(), ExceptionUtils.getRootCause(ex).toString()));
+ LOGGER.info("Exception is %sRoot cause is%s".formatted(ExceptionUtils.getRootCause(ex).getMessage(), ExceptionUtils.getRootCause(ex).toString()));
if(ExceptionUtils.getRootCause(ex).getMessage().contains("NodeIngestor")){
aai = new AAIException("AAI_3026","Error reading OXM from SchemaService - Investigate");
}
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
}
private Map<Integer,String> processFile() throws IOException {
- List<String> lines = Files.readAllLines(Paths.get(path));
+ List<String> lines = Files.readAllLines(Path.of(path));
final Map<Integer,String> data = new LinkedHashMap<>();
AtomicInteger counter = new AtomicInteger(0);
lines.stream().forEach(line -> {
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.*;
public class SendMigrationNotifications {
}
private Map<String, String> processFile() throws IOException {
- List<String> lines = Files.readAllLines(Paths.get(path));
+ List<String> lines = Files.readAllLines(Path.of(path));
final Map<String,String> vertexIds = new LinkedHashMap<>();
lines.stream().forEach(line -> {
if (line.contains("_")) {
import java.util.Set;
import java.util.logging.Logger;
import java.util.stream.Collectors;
-import javax.annotation.Priority;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.ContainerResponseFilter;
+import jakarta.annotation.Priority;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.container.ContainerResponseFilter;
import org.glassfish.jersey.logging.LoggingFeature;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletProperties;
import org.onap.aai.rest.util.EchoResponse;
import org.onap.logging.filter.base.AuditLogContainerFilter;
import org.reflections8.Reflections;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.core.env.Environment;
+import org.springframework.core.env.Profiles;
import org.springframework.stereotype.Component;
@Component
private Environment env;
- @Autowired
public JerseyConfiguration(Environment env) {
this.env = env;
.stream()
.filter(f -> {
if (f.isAnnotationPresent(Profile.class)
- && !env.acceptsProfiles(f.getAnnotation(Profile.class).value())) {
+ && !env.acceptsProfiles(Profiles.of(f.getAnnotation(Profile.class).value()))) {
return false;
}
return true;
+++ /dev/null
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.aai.web;
-
-import ch.qos.logback.access.jetty.RequestLogImpl;
-import org.eclipse.jetty.server.handler.HandlerCollection;
-import org.eclipse.jetty.server.handler.RequestLogHandler;
-import org.eclipse.jetty.util.thread.QueuedThreadPool;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.web.embedded.jetty.JettyServerCustomizer;
-import org.springframework.boot.web.embedded.jetty.JettyServletWebServerFactory;
-import org.springframework.boot.web.servlet.server.AbstractServletWebServerFactory;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Arrays;
-
-@Configuration
-public class LocalHostAccessLog {
-
- @Bean
- public AbstractServletWebServerFactory jettyConfigBean(
- @Value("${jetty.threadPool.maxThreads:200}") final String maxThreads,
- @Value("${jetty.threadPool.minThreads:8}") final String minThreads
- ){
- JettyServletWebServerFactory jef = new JettyServletWebServerFactory();
- jef.addServerCustomizers((JettyServerCustomizer) server -> {
-
- HandlerCollection handlers = new HandlerCollection();
-
- Arrays.stream(server.getHandlers()).forEach(handlers::addHandler);
-
- RequestLogHandler requestLogHandler = new RequestLogHandler();
- requestLogHandler.setServer(server);
-
- RequestLogImpl requestLogImpl = new RequestLogImpl();
- requestLogImpl.setResource("/localhost-access-logback.xml");
- requestLogImpl.start();
-
- requestLogHandler.setRequestLog(requestLogImpl);
- handlers.addHandler(requestLogHandler);
- server.setHandler(handlers);
-
- final QueuedThreadPool threadPool = server.getBean(QueuedThreadPool.class);
- threadPool.setMaxThreads(Integer.valueOf(maxThreads));
- threadPool.setMinThreads(Integer.valueOf(minThreads));
- });
- return jef;
- }
-}
management.endpoints.enabled-by-default=true
#To Enable Actuator Endpoint, you can override this in OOM Charts
management.endpoints.web.exposure.include=info, health, loggers, prometheus
-management.metrics.web.server.auto-time-requests=false
+# This property is deprecated: Should be applied at the ObservationRegistry level.
+# management.metrics.web.server.request.autotime.enabled=false
management.info.env.enabled=true
# If true, the actuator health check will be overriden
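
Editorial note on the property hunk above: as the replacement comment says, the removed auto-time-requests switch has no direct Spring Boot 3 equivalent; request auto-timing is now governed at the ObservationRegistry level. A sketch of how that could be expressed, assuming the default server observation name http.server.requests; the configuration class and bean name are illustrative and not part of this patch:

import io.micrometer.observation.ObservationRegistry;
import org.springframework.boot.actuate.autoconfigure.observation.ObservationRegistryCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Hypothetical configuration, not part of this patch: suppresses the
// "http.server.requests" observation, roughly what the removed property did.
@Configuration
class HttpServerObservationConfig {

    @Bean
    ObservationRegistryCustomizer<ObservationRegistry> skipHttpServerObservations() {
        return registry -> registry.observationConfig()
                .observationPredicate((name, context) -> !"http.server.requests".equals(name));
    }
}
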
+++ /dev/null
-<!--
-
- ============LICENSE_START=======================================================
- org.onap.aai
- ================================================================================
- Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
-
- ECOMP is a trademark and service mark of AT&T Intellectual Property.
-
--->
-<configuration>
- <property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
- <property name="maxHistory" value='7' />
- <property name="totalSizeCap" value='1GB' />
- <appender name="ACCESS"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log.%d{yyyy-MM-dd}
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder class="org.onap.aai.logging.CustomLogPatternLayoutEncoder">
- <Pattern>%a %u %z [%t] "%m %U%q" %s %b %y %i{X-TransactionId} %i{X-FromAppId} %i{X-Forwarded-For} %i{X-AAI-SSL-Client-CN} %i{X-AAI-SSL-Client-OU} %i{X-AAI-SSL-Client-O} %i{X-AAI-SSL-Client-L} %i{X-AAI-SSL-Client-ST} %i{X-AAI-SSL-Client-C} %i{X-AAI-SSL-Client-NotBefore} %i{X-AAI-SSL-Client-NotAfter} %i{X-AAI-SSL-Client-DN} %D</Pattern>
- </encoder>
- </appender>
- <appender-ref ref="ACCESS" />
- <appender name="STDOUTACCESS" class="ch.qos.logback.core.ConsoleAppender">
- <encoder class="org.onap.aai.logging.CustomLogPatternLayoutEncoder">
- <Pattern>%a %u %z [%t] "%m %U%q" %s %b %y %i{X-TransactionId} %i{X-FromAppId} %i{X-Forwarded-For} %i{X-AAI-SSL-Client-CN} %i{X-AAI-SSL-Client-OU} %i{X-AAI-SSL-Client-O} %i{X-AAI-SSL-Client-L} %i{X-AAI-SSL-Client-ST} %i{X-AAI-SSL-Client-C} %i{X-AAI-SSL-Client-NotBefore} %i{X-AAI-SSL-Client-NotAfter} %i{X-AAI-SSL-Client-DN} %D - "logType": "access"</Pattern>
- </encoder>
- </appender>
- <appender-ref ref="STDOUTACCESS" />
-</configuration>
-
-<!--
-%a - Remote IP address
-%A - Local IP address
-%b - Bytes sent, excluding HTTP headers, or '-' if no bytes were sent
-%B - Bytes sent, excluding HTTP headers
-%h - Remote host name
-%H - Request protocol
-%l - Remote logical username from identd (always returns '-')
-%m - Request method
-%p - Local port
-%q - Query string (prepended with a '?' if it exists, otherwise an empty string
-%r - First line of the request
-%s - HTTP status code of the response
-%S - User session ID
-%t - Date and time, in Common Log Format format
-%u - Remote user that was authenticated
-%U - Requested URL path
-%v - Local server name
-%I - current request thread name (can compare later with stacktraces)
-
-%z - Custom pattern that parses the cert for the subject
-%y - Custom pattern determines rest or dme2
- -->
\ No newline at end of file
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.util.AAIConfig;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.cassandra.CassandraAutoConfiguration;
import org.springframework.boot.autoconfigure.data.cassandra.CassandraDataAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.web.server.LocalServerPort;
import org.springframework.context.annotation.Import;
import org.springframework.http.*;
import org.springframework.test.context.TestPropertySource;
.getClassLoader()
.getResourceAsStream(filename);
- String message = String.format("Unable to find the %s in src/test/resources", filename);
+ String message = "Unable to find the %s in src/test/resources".formatted(filename);
assertNotNull(inputStream, message);
String resource = IOUtils.toString(inputStream);
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
-import org.apache.http.client.HttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.springframework.beans.factory.annotation.Autowired;
+import org.apache.hc.client5.http.classic.HttpClient;
+import org.apache.hc.client5.http.impl.classic.HttpClients;
+import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManager;
+import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder;
+import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactoryBuilder;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.context.annotation.Bean;
-import org.springframework.core.env.Environment;
import org.springframework.http.HttpStatus;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
RestTemplate restTemplate(RestTemplateBuilder builder) throws Exception {
SSLContext sslContext = SSLContext.getDefault();
-
- HttpClient client = HttpClients.custom()
- .setSSLContext(sslContext)
- .setSSLHostnameVerifier((s, sslSession) -> true)
- .build();
-
- RestTemplate restTemplate = builder
- .requestFactory(() -> new HttpComponentsClientHttpRequestFactory(client))
- .build();
+ PoolingHttpClientConnectionManager connectionManager = PoolingHttpClientConnectionManagerBuilder.create()
+ .setSSLSocketFactory(
+ SSLConnectionSocketFactoryBuilder.create()
+ .setSslContext(sslContext)
+ .build()
+ )
+ .build();
+ HttpClient client = HttpClients
+ .custom()
+ .setConnectionManager(connectionManager)
+ .build();
+
+ RestTemplate restTemplate = builder.requestFactory(() -> new HttpComponentsClientHttpRequestFactory(client)).build();
restTemplate.setErrorHandler(new ResponseErrorHandler() {
@Override
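Editor's note: the removed HttpClient 4.x setup disabled hostname verification via setSSLHostnameVerifier((s, sslSession) -> true), whereas the new connection-manager based setup falls back to the default verifier. If the tests still need to accept certificates whose hostname does not match (an assumption, not something this patch states), a sketch of the HttpClient 5.x equivalent using NoopHostnameVerifier:

import javax.net.ssl.SSLContext;

import org.apache.hc.client5.http.classic.HttpClient;
import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManager;
import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder;
import org.apache.hc.client5.http.ssl.NoopHostnameVerifier;
import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactoryBuilder;

// Hypothetical helper, not part of this change.
class RelaxedTestHttpClientFactory {
    static HttpClient create(SSLContext sslContext) {
        PoolingHttpClientConnectionManager connectionManager = PoolingHttpClientConnectionManagerBuilder.create()
                .setSSLSocketFactory(SSLConnectionSocketFactoryBuilder.create()
                        .setSslContext(sslContext)
                        // Reproduces the old "accept any hostname" behaviour of the 4.x client.
                        .setHostnameVerifier(NoopHostnameVerifier.INSTANCE)
                        .build())
                .build();
        return HttpClients.custom()
                .setConnectionManager(connectionManager)
                .build();
    }
}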
import org.onap.aai.config.SpringContextAware;
import org.onap.aai.exceptions.AAIException;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.actuate.autoconfigure.web.server.LocalManagementPort;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.cassandra.CassandraAutoConfiguration;
import org.springframework.boot.autoconfigure.data.cassandra.CassandraDataAutoConfiguration;
import org.springframework.boot.test.autoconfigure.actuate.metrics.AutoConfigureMetrics;
import org.springframework.boot.test.context.SpringBootTest;
-import org.springframework.boot.web.server.LocalServerPort;
+import org.springframework.boot.test.web.server.LocalManagementPort;
+import org.springframework.boot.test.web.server.LocalServerPort;
import org.springframework.context.annotation.Import;
import org.springframework.http.*;
import org.springframework.test.context.ContextConfiguration;
InputStream inputStream = PayloadUtil.class.getClassLoader().getResourceAsStream("payloads/expected/" + fileName);
- String message = String.format("Unable to find the %s in src/test/resources", fileName);
+ String message = "Unable to find the %s in src/test/resources".formatted(fileName);
assertNotNull(inputStream, message);
String resource = IOUtils.toString(inputStream);
InputStream inputStream = PayloadUtil.class.getClassLoader().getResourceAsStream("payloads/resource/" + fileName);
- String message = String.format("Unable to find the %s in src/test/resources", fileName);
+ String message = "Unable to find the %s in src/test/resources".formatted(fileName);
assertNotNull(inputStream, message);
String resource = IOUtils.toString(inputStream);
InputStream inputStream = PayloadUtil.class.getClassLoader().getResourceAsStream("payloads/templates/" + fileName);
- String message = String.format("Unable to find the %s in src/test/resources", fileName);
+ String message = "Unable to find the %s in src/test/resources".formatted(fileName);
assertNotNull(inputStream, message);
String resource;
InputStream inputStream = PayloadUtil.class.getClassLoader().getResourceAsStream("payloads/named-queries/" + fileName);
- String message = String.format("Unable to find the %s in src/test/resources/payloads/named-queries", fileName);
+ String message = "Unable to find the %s in src/test/resources/payloads/named-queries".formatted(fileName);
assertNotNull(inputStream, message);
String resource = IOUtils.toString(inputStream);
import java.util.Set;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.is;
String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
- Set<Path> preSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+ Set<Path> preSnapshotFiles = Files.walk(Path.of(logsFolder)).collect(Collectors.toSet());
// Run the clear dataSnapshot and this time it should fail
//String [] args = {"JUST_TAKE_SNAPSHOT"}; >> default behavior is now to use 15 threads
// Add sleep so the file actually gets created with the data
- Set<Path> postSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+ Set<Path> postSnapshotFiles = Files.walk(Path.of(logsFolder)).collect(Collectors.toSet());
assertThat(postSnapshotFiles.size(), is(preSnapshotFiles.size()+1));
postSnapshotFiles.removeAll(preSnapshotFiles);
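Editor's note: the Paths.get calls were replaced with the equivalent Path.of factory introduced in Java 11. Unrelated to this patch, Files.walk returns a Stream backed by open directory handles; a sketch (hypothetical helper) of the same lookup wrapped in try-with-resources so repeated snapshot tests do not leak file descriptors:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Hypothetical helper, not part of this change.
class SnapshotFileLister {
    static Set<Path> listFiles(String logsFolder) throws IOException {
        // Closing the stream releases the directory handles opened by Files.walk.
        try (Stream<Path> paths = Files.walk(Path.of(logsFolder))) {
            return paths.collect(Collectors.toSet());
        }
    }
}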
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
- Set<Path> preSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+ Set<Path> preSnapshotFiles = Files.walk(Path.of(logsFolder)).collect(Collectors.toSet());
// Run the clear dataSnapshot and this time it should fail
//String [] args = {"JUST_TAKE_SNAPSHOT"}; >> default behavior is now to use 15 threads
// Add sleep so the file actually gets created with the data
- Set<Path> postSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+ Set<Path> postSnapshotFiles = Files.walk(Path.of(logsFolder)).collect(Collectors.toSet());
assertThat(postSnapshotFiles.size(), is(preSnapshotFiles.size() +1));
postSnapshotFiles.removeAll(preSnapshotFiles);
String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
- Set<Path> preSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+ Set<Path> preSnapshotFiles = Files.walk(Path.of(logsFolder)).collect(Collectors.toSet());
// previous test may have the same generated file name, this wait will ensure a new name is used for this test
System.out.println("delay generation, seconds " + DELAYSINGLETHREADTEST);
// Add sleep so the file actually gets created with the data
- Set<Path> postSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+ Set<Path> postSnapshotFiles = Files.walk(Path.of(logsFolder)).collect(Collectors.toSet());
assertThat(postSnapshotFiles.size(), is(preSnapshotFiles.size() +1));
boolean gryoSnapshotExists = postSnapshotFiles.stream()
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
- Set<Path> preSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+ Set<Path> preSnapshotFiles = Files.walk(Path.of(logsFolder)).collect(Collectors.toSet());
// Run the clear dataSnapshot and this time it should fail
//String [] args = {"JUST_TAKE_SNAPSHOT"}; >> default behavior is now to use 15 threads
// Add sleep so the file actually gets created with the data
- Set<Path> postSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+ Set<Path> postSnapshotFiles = Files.walk(Path.of(logsFolder)).collect(Collectors.toSet());
assertThat(postSnapshotFiles.size(), is(preSnapshotFiles.size()+1));
postSnapshotFiles.removeAll(preSnapshotFiles);
assertEquals(true, g.V().has("hostname", "TEST1").both().has("interface-name", "p-interface2").has("special-prop", "value").hasNext(), "pserver skeleton child merged");
assertEquals(true, g.V().has("hostname", "TEST1").both().has("interface-name", "p-interface2").both().has("interface-name", "l-interface1").has("special-prop", "value").hasNext(), "l-interface child merged");
assertEquals(true, g.V().has("hostname", "TEST1").both().has("interface-name", "p-interface2").both().has("interface-name", "l-interface1").both().has("link-name", "logical-link1").hasNext(), "l-interface child cousin edge merged");
- assertEquals(new Long(1), g.V().has("interface-name", "l-interface1").count().next(), "one l-interface1 found");
- assertEquals(new Long(1), g.V().has("interface-name", "p-interface2").count().next(), "one p-interface2 found");
+ assertEquals(Long.valueOf(1), g.V().has("interface-name", "l-interface1").count().next(), "one l-interface1 found");
+ assertEquals(Long.valueOf(1), g.V().has("interface-name", "p-interface2").count().next(), "one p-interface2 found");
}
}
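Editor's note: the new Long(...) calls above were replaced because the boxed-primitive constructors have been deprecated for removal since Java 9; Long.valueOf is the supported factory and reuses cached instances for small values. A small illustration:

class LongBoxingExample {
    public static void main(String[] args) {
        Long expected = Long.valueOf(2L); // preferred; values in -128..127 come from an internal cache
        long counted = 2L;
        System.out.println(expected.equals(counted)); // value equality still holds, prints true
    }
}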
@Test
public void pnfsExistTest() throws Exception {
// check if pnf node gets created
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "pnf")
.count().next(),
"2 PNFs exist");
@Test
public void pInterfacesExistTest() throws Exception {
- assertEquals(new Long(4L),
+ assertEquals(Long.valueOf(4L),
g.V().has("aai-node-type", "p-interface")
.count().next(),
"4 Pinterfaces exist");
.has("interface-name","1.7")
.hasNext(),
"p-interface 1.7 created for pnf-name-collector-1");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "pnf")
.has("pnf-name", "pnf-name-1")
.in("tosca.relationships.network.BindsTo").count().next(),
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-1").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-1")
.in().count().next(),
"Edge exists to 2 connectors");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-2").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-2")
.in().count().next(),
"Edge exists to only 2 service-instances");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-3").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-3")
.in().count().next(),
"Edge exists to only 2 pnfs");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-4").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-4")
.in().count().next(),
"Edge exists to only 2 logical-link");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-5").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-5")
.in().count().next(),
"Edge exists to only 2 logical-link");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-6").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-6")
.in().count().next(),
"Edge exists to only 2 generic-vnfs");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-7").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-7")
.in().count().next(),
"Edge exists to only 2 configurations");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-8").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-8")
.in().count().next(),
"Edge exists to only 2 l3-networks");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-9").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-9")
.in().count().next(),
"Edge exists to only 2 vf-modules");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-10").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-10")
.in().count().next(),
"Edge exists to only 2 collections");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-11").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-11")
.in().count().next(),
"Edge exists to only 2 instance-group2");
.has("aai-node-type", "model-ver").has("model-version-id","model-version-id-12").hasNext(),
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-12")
.in().count().next(),
"Edge exists to only 2 allotted-resource");
"Edge not created");
- assertEquals(new Long(2L),
+ assertEquals(Long.valueOf(2L),
g.V().has("aai-node-type", "network-technology").has("network-technology-id", "network-technology-1")
.in().count().next(),
"Edge exists to 2 cloud regions");
assertEquals(true, g.V().has("aai-node-type", "p-interface").has("aai-uri","/cloud-infrastructure/pservers/pserver/rctP3/p-interfaces/p-interface/pint11").hasNext());
//2. pint12 int-name matches with pint31. So, verify that p-int does not move from rctP1 to rctP3
- assertEquals(new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
+ assertEquals(Long.valueOf(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
.has("aai-node-type","p-interface").has("interface-name","pint12").count().next(), "rctP3 has only 1 pint with name pint12");
//3. Verify that the p-interface from pserver is not moved to another pserver that has null fqdn
//4. If the fqdn is "" within 2 RCT pservers, ignore that case. Don't move the p-int from old resource-version to new resource-version pserver
assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","rctP5").in("tosca.relationships.network.BindsTo")
.has("aai-node-type","p-interface").has("interface-name","pint41").hasNext());
- assertEquals(new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP5").in("tosca.relationships.network.BindsTo")
+ assertEquals(Long.valueOf(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP5").in("tosca.relationships.network.BindsTo")
.has("aai-node-type","p-interface").count().next(), "rctP5 has only 1 p-interface");
//5. plink is moved from pint3 on pserver fqdn1 to pint2 on pserver fqdn3. Both p-ints have the same interface-name
//2. lagint12 int-name matches with lagint31. So, verify that lag-int does not move from rctP1 to rctP3
- assertEquals(new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
+ assertEquals(Long.valueOf(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
.has("aai-node-type","lag-interface").has("interface-name","lagint12").count().next(), "rctP3 has only 1 lag-interface with name lagint12");
}
@Test
public void checkRCTPserverHasRelnToOnly1Zone() throws Exception {
- assertEquals(new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","Scn6.pserverRCT1Scn6").out("org.onap.relationships.inventory.LocatedIn")
+ assertEquals(Long.valueOf(1L), g.V().has("aai-node-type", "pserver").has("hostname","Scn6.pserverRCT1Scn6").out("org.onap.relationships.inventory.LocatedIn")
.has("aai-node-type","zone").count().next(), "Edge to only 1 Zone exists");
assertEquals(true, g.V().has("aai-node-type", "zone").has("zone-id","zone-62").hasNext());
//Verify no edge exists from zone61 to lower resource-version RCT pserver
@Test
public void checkRCTPserverHasRelnTo2GenericVnfs() throws Exception {
- assertEquals(new Long(2L), g.V().has("aai-node-type", "pserver").has("hostname","Scn6.pserverRCT1Scn6").in("tosca.relationships.HostedOn")
+ assertEquals(Long.valueOf(2L), g.V().has("aai-node-type", "pserver").has("hostname","Scn6.pserverRCT1Scn6").in("tosca.relationships.HostedOn")
.has("aai-node-type","generic-vnf").count().next(), "Edge to 2 generic-vnfs exists");
assertEquals(true, g.V().has("aai-node-type", "generic-vnf").has("vnf-id","vnf-1").out().has("aai-node-type", "pserver").has("hostname", "Scn6.pserverRCT1Scn6").hasNext());
//Verify no edge exists from vnf-1 to lower resource-version pserver
assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo")
.has("aai-node-type","p-interface").has("interface-name","pintRo1").out().has("link-name","plinkROonOldRo1").hasNext());
//Verify complex does not get attached to pserverRO5
- assertEquals(new Long(1L), g.V().has("physical-location-id", "complexOldRO").in("org.onap.relationships.inventory.LocatedIn").count().next(), "Complex is related to only 1 pserver");
+ assertEquals(Long.valueOf(1L), g.V().has("physical-location-id", "complexOldRO").in("org.onap.relationships.inventory.LocatedIn").count().next(), "Complex is related to only 1 pserver");
}
@Test
assertEquals("Scn4.pserverROSPlinkScn4",pserverRCTPlinkScn4.property("fqdn").value().toString());
assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTPlinkScn4").in("tosca.relationships.network.BindsTo")
.has("aai-node-type","p-interface").has("interface-name","pintPlinkScn4").hasNext());
- assertEquals(new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTPlinkScn4").in("tosca.relationships.network.BindsTo")
+ assertEquals(Long.valueOf(1L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTPlinkScn4").in("tosca.relationships.network.BindsTo")
.has("aai-node-type","p-interface").has("interface-name","pintPlinkScn4").count().next(), "only 1 p-int is present on RCT pserver");
assertEquals("/cloud-infrastructure/pservers/pserver/pserverRCTPlinkScn4/p-interfaces/p-interface/pintPlinkScn4", samePintScn4RCT.property("aai-uri").value().toString());
//plink is not moved from RO to RCT when p-int matches
@Test
public void checkRCTPserverHasRelnToOnly1Complex() throws Exception {
- assertEquals(new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn3").out("org.onap.relationships.inventory.LocatedIn")
+ assertEquals(Long.valueOf(1L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn3").out("org.onap.relationships.inventory.LocatedIn")
.has("aai-node-type","complex").count().next(), "Edge to only 1 complex exists");
}
@Test
public void checkRCTPserverHasRelnToOnly1Zone() throws Exception {
- assertEquals(new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn6").out("org.onap.relationships.inventory.LocatedIn")
+ assertEquals(Long.valueOf(1L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn6").out("org.onap.relationships.inventory.LocatedIn")
.has("aai-node-type","zone").count().next(), "Edge to only 1 Zone exists");
assertEquals(true, g.V().has("aai-node-type", "zone").has("zone-id","zone-62").hasNext());
//Verify no edge exists from zone62 to RO pserver
@Test
public void checkRCTPserverHasRelnTo2GenericVnfs() throws Exception {
- assertEquals(new Long(2L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn6").in("tosca.relationships.HostedOn")
+ assertEquals(Long.valueOf(2L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn6").in("tosca.relationships.HostedOn")
.has("aai-node-type","generic-vnf").count().next(), "Edge to 2 generic-vnfs exists");
assertEquals(true, g.V().has("aai-node-type", "generic-vnf").has("vnf-id","vnf-2").out().has("aai-node-type", "pserver").has("hostname", "pserverRCTScn6").hasNext());
//Verify no edge exists from zone62 to RO pserver
//2. lagint12 int-name matches with lagint31. So, verify that lag-int does not move from rctP1 to rctP3
- assertEquals(new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP1").in("tosca.relationships.network.BindsTo")
+ assertEquals(Long.valueOf(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP1").in("tosca.relationships.network.BindsTo")
.has("aai-node-type","lag-interface").has("interface-name","lagint12").count().next(), "rctP1 has only 1 lag-interface with name lagint12");
}
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.mockito.ArgumentMatchers.anyObject;
+import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Set;
import java.util.UUID;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedHashMap;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.UriInfo;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
when(uriInfo.getQueryParameters(false)).thenReturn(queryParameters);
// TODO - Check if this is valid since RemoveDME2QueryParameters seems to be very unreasonable
- Mockito.doReturn(null).when(queryParameters).remove(anyObject());
+ Mockito.doReturn(null).when(queryParameters).remove(any());
when(httpHeaders.getMediaType()).thenReturn(APPLICATION_JSON);
}
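Editor's note: anyObject() was deprecated in Mockito 2 and removed in Mockito 4, so the stubbing above now uses any(), which matches any argument including null. A minimal, self-contained sketch of the same stubbing pattern against a hypothetical mocked map:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

import java.util.Map;

// Hypothetical example, not part of this change.
class MatcherMigrationExample {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        Map<String, Object> queryParameters = mock(Map.class);
        // any() replaces the removed anyObject() matcher.
        doReturn(null).when(queryParameters).remove(any());
        System.out.println(queryParameters.remove("ignored")); // prints null, per the stub
    }
}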
@Test
public void addNewIndex() throws JsonParseException, JsonMappingException, IOException {
ObjectMapper mapper = new ObjectMapper();
- String content = " {\r\n" +
- " \"name\" : \"equipment-name\",\r\n" +
- " \"unique\" : false,\r\n" +
- " \"properties\" : [ {\r\n" +
- " \"name\" : \"equipment-name\",\r\n" +
- " \"cardinality\" : \"SINGLE\",\r\n" +
- " \"typeClass\" : \"java.lang.String\"\r\n" +
- " } ]\r\n" +
- " }";
+ String content = """
+ {\r
+ "name" : "equipment-name",\r
+ "unique" : false,\r
+ "properties" : [ {\r
+ "name" : "equipment-name",\r
+ "cardinality" : "SINGLE",\r
+ "typeClass" : "java.lang.String"\r
+ } ]\r
+ }\
+ """;
DBIndex index = mapper.readValue(content, DBIndex.class);
ManageJanusGraphSchema schema = new ManageJanusGraphSchema(graph, auditorFactory, schemaVersions, edgeIngestor);
JanusGraphManagement mgmt = graph.openManagement();
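Editor's note: the concatenated JSON literal was converted to a Java text block. Two details keep the parsed content identical: the explicit \r escapes preserve the CRLF line endings of the original string, and the backslash before the closing delimiter suppresses the final newline. A small illustration:

class TextBlockExample {
    public static void main(String[] args) {
        String block = """
                line-1\r
                line-2\
                """;
        // \r keeps the carriage return, the trailing backslash drops the last newline.
        System.out.println(block.equals("line-1\r\nline-2")); // prints true
    }
}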
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.concurrent.atomic.AtomicBoolean;
tx.commit();
try{
- Files.createFile(Paths.get(FILE));
+ Files.createFile(Path.of(FILE));
}catch(Exception e) {
System.out.println("Create File error : " + e.getMessage());
}
String finalStr = "";
finalStr = "pserver" + "#@#" + "/cloud-infrastructure/pservers/pserver/"+SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-1" + "#@#" + "{\"hostname\":\""+ SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-1\",\"resource-version\":\"333\"}" + "\n";
- Files.write(Paths.get(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
+ Files.write(Path.of(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
finalStr = "pserver" + "#@#" + "/cloud-infrastructure/pservers/pserver/"+SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-2" + "#@#" + "{\"hostname\":\""+ SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-2\",\"resource-version\":\"334\"}" + "\n";
- Files.write(Paths.get(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
+ Files.write(Path.of(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
finalStr = "pserver" + "#@#" + "/cloud-infrastructure/pservers/pserver/"+SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-3" + "#@#" + "{\"hostname\":\""+ SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-3\",\"resource-version\":\"335\"}" + "\n";
- Files.write(Paths.get(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
+ Files.write(Path.of(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
finalStr = "pserver" + "#@#" + "/cloud-infrastructure/pservers/pserver/"+SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-4" + "#@#" + "{\"hostname\":\""+ SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-4\",\"resource-version\":\"336\"}" + "\n";
- Files.write(Paths.get(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
+ Files.write(Path.of(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
graphCreated.compareAndSet(false, true);
}
}
@AfterAll
public static void cleanUp() throws IOException {
- Files.delete(Paths.get(FILE));
+ Files.delete(Path.of(FILE));
}
@AfterEach
import java.io.IOException;
import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
list.add(pnf3.id().toString() + "_222"); // invalid: wrong resource version
list.add("345_345"); // invalid
list.add(pserver1.id().toString() + "_333"); // valid
- Files.write(Paths.get(FILE), (Iterable<String>)list.stream()::iterator);
+ Files.write(Path.of(FILE), (Iterable<String>)list.stream()::iterator);
graphCreated.compareAndSet(false, true);
}
}
@AfterAll
public static void cleanUp() throws IOException {
- Files.delete(Paths.get(FILE));
+ Files.delete(Path.of(FILE));
}
@AfterEach
spring.application.name=GraphAdmin
-server.contextPath=/
+server.servlet.context-path=/
spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration,org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration
# These two properties represent whether it should use component scan
server.basic.auth.location=${server.local.startpath}/etc/auth/realm.properties
server.port=8449
-security.require-ssl=false
+# This property is deprecated: The security auto-configuration is no longer customizable. Provide your own WebSecurityConfigurer bean instead.
+# security.require-ssl=false
server.ssl.enabled=false
# Schema related attributes for the oxm and edges
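Editor's note: as the deprecation comment above says, the old security.require-ssl switch no longer has a property equivalent; channel security is expressed in a security configuration bean instead. A minimal sketch assuming Spring Security 6-style configuration (this project keeps server.ssl.enabled=false, so the bean below only illustrates what the removed flag used to toggle):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.web.SecurityFilterChain;

// Hypothetical configuration, not part of this change.
@Configuration
public class ChannelSecurityConfig {

    @Bean
    SecurityFilterChain channelSecurity(HttpSecurity http) throws Exception {
        // Equivalent of security.require-ssl=true: require HTTPS for every request.
        http.requiresChannel(channel -> channel.anyRequest().requiresSecure());
        return http.build();
    }
}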
management.server.port=0
management.endpoints.enabled-by-default=true
management.endpoints.web.exposure.include=info, health, loggers, prometheus
-management.metrics.web.server.auto-time-requests=false
+# The old auto-time-requests flag was renamed to request.autotime.enabled and is itself deprecated: auto-timing should be applied at the ObservationRegistry level instead.
+# management.metrics.web.server.request.autotime.enabled=false
aai.notifications.enabled=false
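Editor's note: one possible way to get the old auto-time-requests=false behaviour back, assuming Spring Boot 3's observation support is in play (an illustration only, not something this patch adds), is to filter out the server-request observation:

import io.micrometer.observation.ObservationRegistry;

import org.springframework.boot.actuate.autoconfigure.observation.ObservationRegistryCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Hypothetical configuration, not part of this change.
@Configuration
public class RequestTimingObservationConfig {

    @Bean
    ObservationRegistryCustomizer<ObservationRegistry> skipServerRequestTiming() {
        // Drops the http.server.requests observation, which is what produced the auto-timed metrics.
        return registry -> registry.observationConfig()
                .observationPredicate((name, context) -> !"http.server.requests".equals(name));
    }
}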
# because they are used in Jenkins, whose plug-in doesn't support
major_version=1
-minor_version=15
-patch_version=6
+minor_version=16
+patch_version=0
base_version=${major_version}.${minor_version}.${patch_version}