<parent>
<groupId>org.onap.aai.aai-common</groupId>
<artifactId>aai-parent</artifactId>
- <version>1.15.0</version>
+ <version>1.15.1-SNAPSHOT</version>
</parent>
<groupId>org.onap.aai.graphadmin</groupId>
<artifactId>aai-graphadmin</artifactId>
- <version>1.15.0-SNAPSHOT</version>
+ <version>1.15.1-SNAPSHOT</version>
<properties>
<docker.push.registry>localhost:5000</docker.push.registry>
<aai.docker.version>1.0.0</aai.docker.version>
<aai.schema.service.version>1.12.5</aai.schema.service.version>
- <aai.common.version>1.15.0</aai.common.version>
+ <aai.common.version>1.15.1-SNAPSHOT</aai.common.version>
<aai.build.directory>${project.build.directory}/${project.artifactId}-${project.version}-build/
</aai.build.directory>
<aai.docker.namespace>onap</aai.docker.namespace>
<!-- End of Default ONAP Schema Properties -->
<micrometer.version>1.8.1</micrometer.version>
- <activemq.version>5.16.7</activemq.version>
<antlr.version>4.9.3</antlr.version>
<jcommander.version>1.78</jcommander.version>
<gremlin.version>3.7.1</gremlin.version>
</dependency>
<!-- End of graphadmin metric collection dependencies -->
- <dependency>
- <groupId>javax.jms</groupId>
- <artifactId>javax.jms-api</artifactId>
- <version>2.0.1</version>
- </dependency>
<dependency>
<groupId>org.onap.aai.aai-common</groupId>
<artifactId>aai-core</artifactId>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-jmx</artifactId>
</exclusion>
- <exclusion>
- <groupId>org.onap.aai.aai-common</groupId>
- <artifactId>aai-aaf-auth</artifactId>
- </exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</dependency>
- <!--
- Do not use activemq-all because they force you to use a specific logging
- and they shade it so you can't simply exclude it and when you deploy the
- jar, you will notice failure
- -->
- <dependency>
- <groupId>org.apache.activemq</groupId>
- <artifactId>activemq-broker</artifactId>
- <version>${activemq.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.activemq</groupId>
- <artifactId>activemq-client</artifactId>
- <version>${activemq.version}</version>
- </dependency>
<!-- Do not use the jersey-client since jersey client 1.0 version clashes
with jersey 2 which we are using -->
<!-- Use this to make http requests instead of jersey 1.0 client -->
</exclusion>
</exclusions>
</dependency>
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-jms</artifactId>
- <exclusions>
- <exclusion>
- <groupId>com.sun.jersey</groupId>
- <artifactId>jersey-core</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
import org.json.JSONException;
import org.json.JSONObject;
import org.onap.aai.aailog.logs.AaiDebugLog;
+import org.onap.aai.config.SpringContextAware;
import org.onap.aai.edges.EdgeIngestor;
import org.onap.aai.edges.enums.EdgeType;
import org.onap.aai.edges.exceptions.AmbiguousRuleChoiceException;
import org.onap.aai.introspection.Loader;
import org.onap.aai.introspection.LoaderFactory;
import org.onap.aai.introspection.ModelType;
+import org.onap.aai.kafka.NotificationProducer;
import org.onap.aai.serialization.db.DBSerializer;
import org.onap.aai.serialization.db.EdgeSerializer;
import org.onap.aai.serialization.db.exceptions.NoEdgeRuleFoundException;
@MigrationPriority(0)
@MigrationDangerRating(0)
public abstract class Migrator implements Runnable {
-
+
protected Logger logger = null;
protected DBSerializer serializer = null;
protected static final String MIGRATION_ERROR = "Migration Error: ";
protected static final String MIGRATION_SUMMARY_COUNT = "Migration Summary Count: ";
-
+
private static AaiDebugLog debugLog = new AaiDebugLog();
static {
debugLog.setupMDC();
this.edgeIngestor = edgeIngestor;
this.edgeSerializer = edgeSerializer;
this.schemaVersions = schemaVersions;
- initDBSerializer();
- this.notificationHelper = new NotificationHelper(loader, serializer, loaderFactory, schemaVersions, engine, "AAI-MIGRATION", this.getMigrationName());
+ initDBSerializer();
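+ // resolve the Kafka-backed NotificationProducer bean from the Spring context for migration event publishing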
+ NotificationProducer notificationProducer = SpringContextAware.getBean(NotificationProducer.class);
+ this.notificationHelper = new NotificationHelper(notificationProducer, loader, serializer, loaderFactory, schemaVersions, engine, "AAI-MIGRATION", this.getMigrationName());
MDC.put("logFilenameAppender", this.getClass().getSimpleName());
logger = LoggerFactory.getLogger(this.getClass().getSimpleName());
logAndPrint(logger,"\tInitialization of " + this.getClass().getSimpleName() + " migration script complete.");
if (dmaapMsgList.size() > 0) {
try {
Files.write(Paths.get(logDirectory+"/"+fileName), (Iterable<String>)dmaapMsgList.stream()::iterator);
- } catch (IOException e) {
- System.out.println("Unable to generate file with dmaap msgs for " + getMigrationName() +
+ } catch (IOException e) {
+ System.out.println("Unable to generate file with dmaap msgs for " + getMigrationName() +
" Exception is: " + e.getMessage());
logger.error("Unable to generate file with dmaap msgs for " + getMigrationName(), e);
}
}
}
-
+
/**
* Creates the edge
*
}
return newEdge;
}
-
+
/**
* Creates the edge
*
}
/**
- * Creates the TREE edge
+ * Creates the TREE edge
*
* @param out the out
* @param in the in
Edge newEdge = createEdge(EdgeType.TREE, out, in);
return newEdge;
}
-
+
/**
- * Creates the COUSIN edge
+ * Creates the COUSIN edge
*
* @param out the out
* @param in the in
throw new RuntimeException("could not create seralizer", e);
}
}
-
+
/**
* These are the node types you would like your traversal to process
* @return
*/
public abstract Optional<String[]> getAffectedNodeTypes();
-
+
/**
* used as the "fromAppId" when modifying vertices
* @return
*/
public abstract String getMigrationName();
-
+
/**
* updates all internal vertex properties
* @param v
protected void touchVertexProperties(Vertex v, boolean isNewVertex) {
this.serializer.touchStandardVertexProperties(v, isNewVertex);
}
-
+
public NotificationHelper getNotificationHelper() {
return this.notificationHelper;
}
-
+
/**
* Log and print.
*
import org.onap.aai.introspection.Loader;
import org.onap.aai.introspection.LoaderFactory;
import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.kafka.NotificationProducer;
import org.onap.aai.rest.notification.UEBNotification;
import org.onap.aai.serialization.db.DBSerializer;
import org.onap.aai.serialization.engines.TransactionalGraphEngine;
protected final String transactionId;
protected final String sourceOfTruth;
protected final UEBNotification notification;
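+ // Kafka-backed producer that publishes the collected notification events (see triggerEvents)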
+ protected final NotificationProducer notificationProducer;
- public NotificationHelper(Loader loader, DBSerializer serializer, LoaderFactory loaderFactory, SchemaVersions schemaVersions, TransactionalGraphEngine engine, String transactionId, String sourceOfTruth) {
+ public NotificationHelper(NotificationProducer notificationProducer, Loader loader, DBSerializer serializer, LoaderFactory loaderFactory, SchemaVersions schemaVersions, TransactionalGraphEngine engine, String transactionId, String sourceOfTruth) {
this.loader = loader;
this.serializer = serializer;
this.engine = engine;
this.transactionId = transactionId;
this.sourceOfTruth = sourceOfTruth;
- this.notification = new UEBNotification(loader, loaderFactory, schemaVersions);
+ this.notification = new UEBNotification(loaderFactory, schemaVersions);
+ this.notificationProducer = notificationProducer;
MDC.put("logFilenameAppender", this.getClass().getSimpleName());
LOGGER = LoggerFactory.getLogger(this.getClass().getSimpleName());
}
public void triggerEvents() throws AAIException {
- notification.triggerEvents();
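+ // hand the queued notification events to the Kafka producer for publishing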
+ notificationProducer.sendUEBNotification(notification);
}
public UEBNotification getNotifications() {
package org.onap.aai.util;
import com.att.eelf.configuration.Configuration;
+
+import org.onap.aai.config.SpringContextAware;
import org.onap.aai.dbmap.AAIGraph;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.introspection.Introspector;
import org.onap.aai.introspection.Loader;
import org.onap.aai.introspection.LoaderFactory;
import org.onap.aai.introspection.ModelType;
+import org.onap.aai.kafka.NotificationProducer;
import org.onap.aai.migration.EventAction;
import org.onap.aai.migration.NotificationHelper;
import org.onap.aai.serialization.db.DBSerializer;
protected final LoaderFactory loaderFactory;
protected final SchemaVersions schemaVersions;
protected final SchemaVersion version;
-
+
public SendDeleteMigrationNotifications(LoaderFactory loaderFactory, SchemaVersions schemaVersions, String config, String path, int sleepInMilliSecs, int numToBatch, String requestId, EventAction eventAction, String eventSource) {
System.setProperty("aai.service.name", SendDeleteMigrationNotifications.class.getSimpleName());
Properties props = System.getProperties();
} catch (AAIException e) {
throw new RuntimeException("could not create serializer", e);
}
- this.notificationHelper = new NotificationHelper(loader, serializer, loaderFactory, schemaVersions, engine, requestId, this.eventSource);
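+ // fetch the Kafka NotificationProducer from the Spring context and pass it to the notification helper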
+ NotificationProducer notificationProducer = SpringContextAware.getBean(NotificationProducer.class);
+ this.notificationHelper = new NotificationHelper(notificationProducer, loader, serializer, loaderFactory, schemaVersions, engine, requestId, this.eventSource);
}
protected void initGraph() {
}
-}
\ No newline at end of file
+}
import com.att.eelf.configuration.Configuration;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.config.SpringContextAware;
import org.onap.aai.db.props.AAIProperties;
import org.onap.aai.dbmap.AAIGraph;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.introspection.Loader;
import org.onap.aai.introspection.LoaderFactory;
import org.onap.aai.introspection.ModelType;
+import org.onap.aai.kafka.NotificationProducer;
import org.onap.aai.migration.EventAction;
import org.onap.aai.migration.NotificationHelper;
import org.onap.aai.serialization.db.DBSerializer;
} catch (AAIException e) {
throw new RuntimeException("could not create serializer", e);
}
- this.notificationHelper = new NotificationHelper(loader, serializer, loaderFactory, schemaVersions, engine, requestId, this.eventSource);
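+ // fetch the Kafka NotificationProducer from the Spring context and pass it to the notification helper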
+ NotificationProducer notificationProducer = SpringContextAware.getBean(NotificationProducer.class);
+ this.notificationHelper = new NotificationHelper(notificationProducer, loader, serializer, loaderFactory, schemaVersions, engine, requestId, this.eventSource);
}
protected void initGraph() {
}
-}
\ No newline at end of file
+}
server.ssl.client-auth=want
server.ssl.key-store-type=JKS
-# JMS bind address host port
-jms.bind.address=tcp://localhost:61450
-
# dmaap is deprecated and now replaced with kafka
spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
# but doesn't show up in micrometer metrics
aai.actuator.echo.enabled=false
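+# Kafka notification settings; BOOTSTRAP_SERVERS is referenced by spring.kafka.producer.bootstrap-servers above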
+aai.notifications.enabled=false
+BOOTSTRAP_SERVERS=localhost:9092
+JAAS_CONFIG=
aai.model.delete.sleep.per.vtx.msec=500
aai.model.query.resultset.maxcount=50
aai.model.query.timeout.sec=90
-
+
# Used by Data Grooming
aai.grooming.default.max.fix=150
aai.grooming.default.sleep.minutes=7
aai.ctagPool.rangeString.vplsPe1=2001-2500
aai.ctagPool.rangeString.vplsPe2=2501-3000
-aai.jms.enable=false
-
#used by the dataGrooming and dataSnapshot cleanup tasks
aai.cron.enable.datagroomingcleanup=true
aai.cron.enable.datasnapshotcleanup=true
XmlFormatTransformerConfiguration.class,
RestBeanConfig.class,
LoaderFactory.class,
- NotificationService.class
+ NotificationService.class,
+ KafkaConfig.class
})
@TestPropertySource(properties = {
"schema.uri.base.path = /aai",
@Test
public void processEverything() throws Exception {
SendMigrationNotifications s = spy(new SendMigrationNotifications(
- loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.EMPTY_SET, 0, 0, "test", EventAction.UPDATE, "DMAAP-LOAD"));
+ loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.emptySet(), 0, 0, "test", EventAction.UPDATE, "DMAAP-LOAD"));
doNothing().when(s).trigger();
doNothing().when(s).cleanup();
s.process("/aai/");
@Test
public void processEverythingBatched2() throws Exception {
SendMigrationNotifications s = spy(new SendMigrationNotifications(
- loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.EMPTY_SET, 0, 2, "test", EventAction.UPDATE, "DMAAP-LOAD"));
+ loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.emptySet(), 0, 2, "test", EventAction.UPDATE, "DMAAP-LOAD"));
doNothing().when(s).trigger();
doNothing().when(s).cleanup();
s.process("/aai/");
@Test
public void processEverythingBatched3() throws Exception {
SendMigrationNotifications s = spy(new SendMigrationNotifications(
- loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.EMPTY_SET, 0, 3, "test", EventAction.UPDATE, "DMAAP-LOAD"));
+ loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.emptySet(), 0, 3, "test", EventAction.UPDATE, "DMAAP-LOAD"));
doNothing().when(s).trigger();
doNothing().when(s).cleanup();
s.process("/aai/");
}
-}
\ No newline at end of file
+}
security.require-ssl=false
server.ssl.enabled=false
-# JMS bind address host port
-jms.bind.address=tcp://localhost:61450
-
# Schema related attributes for the oxm and edges
# Any additional schema related attributes should start with prefix schema
schema.configuration.location=N/A
management.endpoints.enabled-by-default=true
management.endpoints.web.exposure.include=info, health, loggers, prometheus
management.metrics.web.server.auto-time-requests=false
+
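+# Disable notification event publishing in the test configuration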
+aai.notifications.enabled=false
aai.model.delete.sleep.per.vtx.msec=500
aai.model.query.resultset.maxcount=50
aai.model.query.timeout.sec=90
-
+
# Used by Data Grooming
aai.grooming.default.max.fix=150
aai.grooming.default.sleep.minutes=7
aai.ctagPool.rangeString.vplsPe1=2001-2500
aai.ctagPool.rangeString.vplsPe2=2501-3000
-aai.jms.enable=false
-
#used by the dataGrooming and dataSnapshot cleanup tasks
aai.cron.enable.datagroomingcleanup=true
aai.cron.enable.datasnapshotcleanup=true
# because they are used in Jenkins, whose plug-in doesn't support
major_version=1
-minor_version=14
-patch_version=7
+minor_version=15
+patch_version=1
base_version=${major_version}.${minor_version}.${patch_version}