*.iml
.idea
-**/target/
\ No newline at end of file
+**/target/
+*.log
\ No newline at end of file
\r
Provisioning is implemented as a Java servlet running under Jetty in one JVM\r
\r
- Provisioning data is stored in a MySQL database\r
+ Provisioning data is stored in a MariaDB database\r
\r
The backup provisioning server and each node is informed any time provisioning data changes\r
\r
\r
Pre-requisites to run the service\r
\r
-MySQL Version 5.6\r
+MariaDB Version 10.2.14\r
\r
Java JDK 1.8\r
\r
-Install MySQL and load needed table into the database\r
+Install MariaDB and load needed table into the database\r
\r
-Sample install_db.sql is provided in the datarouter-prov/data .\r
+Sample sql_init_01.sql is provided in the datarouter-prov/src/main/resources/misc\r
\r
Go to datarouter-prov module and run the service using main.java \r
\r
\r
Maven - 3.2.5 \r
\r
-MySQL - 5.6\r
+MariaDB - 10.2.14\r
\r
Self Signed SSL certificates\r
\r
<version>1.6.4</version>\r
<scope>test</scope>\r
</dependency>\r
- <!-- <dependency><groupId>org.junit</groupId><artifactId>com.springsource.org.junit</artifactId><version>4.4.0</version></dependency>-->\r
<dependency>\r
- <groupId>mysql</groupId>\r
- <artifactId>mysql-connector-java</artifactId>\r
- <version>5.1.21</version>\r
+ <groupId>org.mariadb.jdbc</groupId>\r
+ <artifactId>mariadb-java-client</artifactId>\r
+ <version>2.2.5</version>\r
</dependency>\r
<dependency>\r
<groupId>org.eclipse.jetty.cdi</groupId>\r
<include>**/log4j.properties</include>\r
</includes>\r
</resource>\r
- <!-- <resource><directory>src/main/config</directory><filtering>true</filtering><includes><include>**/log4j*.xml</include></includes></resource><resource><directory>src/main/resources</directory><filtering>false</filtering><excludes><exclude>**/cambriaApiVersion.properties</exclude></excludes></resource>-->\r
+ <resource>\r
+ <directory>src/test/resources</directory>\r
+ <filtering>true</filtering>\r
+ <includes>\r
+ <include>**/log4j.properties</include>\r
+ </includes>\r
+ </resource>\r
</resources>\r
<plugins>\r
<plugin>\r
\r
/** Get the identity of the owner of a feed by group id - Rally : US708115\r
* \r
- * @param feedid, user the ID of the feed whose owner is being looked up.\r
+ * @param feedId the ID of the feed whose owner is being looked up.
* @return the feed owner's identity by group.\r
*/\r
public String getGroupByFeedGroupId(String owner, String feedId);\r
\r
/** Get the identity of the owner of a sub by group id Rally : US708115\r
* \r
- * @param subid, user the ID of the feed whose owner is being looked up.\r
+ * @param subId the ID of the subscription whose owner is being looked up.
* @return the feed owner's identity by group.\r
*/\r
public String getGroupBySubGroupId(String owner, String subId);\r
\r
/**\r
* queryGeneretor - Generating sql query\r
- * @exception SQL Query parse exception.\r
- * @param Map as key value pare of all user input fields\r
+ * @exception ParseException if the SQL query cannot be parsed.
+ * @param map key-value pairs of all user input fields
*/\r
public String queryGeneretor(Map<String, String> map) throws ParseException{\r
\r
* <ol>\r
* <li>Checking DNS once per minute to see which POD the DNS CNAME points to. The CNAME will point to\r
* the active (master) POD.</li>\r
- * <li>On non-master (standby) PODs, fetches provisioning data and logs in order to keep MySQL in sync.</li>\r
+ * <li>On non-master (standby) PODs, fetches provisioning data and logs in order to keep MariaDB in sync.</li>\r
* <li>Providing information to other parts of the system as to the current role (ACTIVE, STANDBY, UNKNOWN)\r
* of this POD.</li>\r
* </ol>\r
import org.onap.dmaap.datarouter.provisioning.utils.LOGJSONObject;\r
\r
/**\r
- * An object that can be represented as a {@link JSONObject}.\r
+ * An object that can be represented as a {@link LOGJSONObject}.\r
* @author Robert Eby\r
* @version $Id: JSONable.java,v 1.1 2013/04/26 21:00:26 eby Exp $\r
*/\r
\r
package org.onap.dmaap.datarouter.provisioning.utils;\r
\r
-import java.io.File;\r
-import java.io.FileReader;\r
-import java.io.IOException;\r
-import java.io.InputStream;\r
-import java.io.LineNumberReader;\r
-import java.lang.reflect.Constructor;\r
-import java.lang.reflect.InvocationTargetException;\r
-import java.sql.Connection;\r
-import java.sql.DatabaseMetaData;\r
-import java.sql.DriverManager;\r
-import java.sql.PreparedStatement;\r
-import java.sql.ResultSet;\r
-import java.sql.SQLException;\r
-import java.sql.Statement;\r
-import java.util.HashSet;\r
-import java.util.LinkedList;\r
-import java.util.NoSuchElementException;\r
-import java.util.Properties;\r
-import java.util.Queue;\r
-import java.util.Set;\r
-\r
import org.apache.log4j.Logger;\r
-import org.onap.dmaap.datarouter.provisioning.beans.DeliveryRecord;\r
-import org.onap.dmaap.datarouter.provisioning.beans.ExpiryRecord;\r
-import org.onap.dmaap.datarouter.provisioning.beans.Loadable;\r
-import org.onap.dmaap.datarouter.provisioning.beans.PublishRecord;\r
+\r
+import java.io.*;\r
+import java.sql.*;\r
+import java.util.*;\r
\r
/**\r
* Load the DB JDBC driver, and manage a simple pool of connections to the DB.\r
*/\r
public class DB {\r
/** The name of the properties file (in CLASSPATH) */\r
- public static final String CONFIG_FILE = "provserver.properties";\r
+ private static final String CONFIG_FILE = "provserver.properties";\r
\r
- private static String DB_DRIVER = "com.mysql.jdbc.Driver";\r
- private static String DB_URL = "jdbc:mysql://127.0.0.1:3306/datarouter";\r
- private static String DB_LOGIN = "datarouter";\r
- private static String DB_PASSWORD = "datarouter";\r
+ private static String DB_URL;\r
+ private static String DB_LOGIN;\r
+ private static String DB_PASSWORD;\r
private static Properties props;\r
private static Logger intlogger = Logger.getLogger("org.onap.dmaap.datarouter.provisioning.internal");\r
- private static Queue<Connection> queue = new LinkedList<Connection>();\r
+ private static final Queue<Connection> queue = new LinkedList<>();\r
\r
public static String HTTPS_PORT;\r
public static String HTTP_PORT;\r
public DB() {\r
if (props == null) {\r
props = new Properties();\r
- InputStream inStream = getClass().getClassLoader().getResourceAsStream(CONFIG_FILE);\r
- try {\r
+ try (InputStream inStream = getClass().getClassLoader().getResourceAsStream(CONFIG_FILE)) {\r
props.load(inStream);\r
- DB_DRIVER = (String) props.get("org.onap.dmaap.datarouter.db.driver");\r
- DB_URL = (String) props.get("org.onap.dmaap.datarouter.db.url");\r
- DB_LOGIN = (String) props.get("org.onap.dmaap.datarouter.db.login");\r
+ String DB_DRIVER = (String) props.get("org.onap.dmaap.datarouter.db.driver");\r
+ DB_URL = (String) props.get("org.onap.dmaap.datarouter.db.url");\r
+ DB_LOGIN = (String) props.get("org.onap.dmaap.datarouter.db.login");\r
DB_PASSWORD = (String) props.get("org.onap.dmaap.datarouter.db.password");\r
HTTPS_PORT = (String) props.get("org.onap.dmaap.datarouter.provserver.https.port");\r
HTTP_PORT = (String) props.get("org.onap.dmaap.datarouter.provserver.http.port");\r
Class.forName(DB_DRIVER);\r
} catch (IOException e) {\r
- intlogger.fatal("PROV9003 Opening properties: "+e.getMessage());\r
+ intlogger.fatal("PROV9003 Opening properties: " + e.getMessage());\r
e.printStackTrace();\r
System.exit(1);\r
} catch (ClassNotFoundException e) {\r
- intlogger.fatal("PROV9004 cannot find the DB driver: "+e);\r
+ intlogger.fatal("PROV9004 cannot find the DB driver: " + e);\r
e.printStackTrace();\r
System.exit(1);\r
- } finally {\r
- try {\r
- inStream.close();\r
- } catch (IOException e) {\r
- }\r
}\r
}\r
}\r
*/\r
@SuppressWarnings("resource")\r
public Connection getConnection() throws SQLException {\r
- Connection c = null;\r
- while (c == null) {\r
+ Connection connection = null;\r
+ while (connection == null) {\r
synchronized (queue) {\r
try {\r
- c = queue.remove();\r
- } catch (NoSuchElementException e) {\r
+ connection = queue.remove();\r
+ } catch (NoSuchElementException nseEx) {\r
int n = 0;\r
do {\r
// Try up to 3 times to get a connection\r
try {\r
- c = DriverManager.getConnection(DB_URL, DB_LOGIN, DB_PASSWORD);\r
- } catch (SQLException e1) {\r
+ connection = DriverManager.getConnection(DB_URL, DB_LOGIN, DB_PASSWORD);\r
+ } catch (SQLException sqlEx) {\r
if (++n >= 3)\r
- throw e1;\r
+ throw sqlEx;\r
}\r
- } while (c == null);\r
+ } while (connection == null);\r
}\r
}\r
- if (c != null && !c.isValid(1)) {\r
- c.close();\r
- c = null;\r
+ if (connection != null && !connection.isValid(1)) {\r
+ connection.close();\r
+ connection = null;\r
}\r
}\r
- return c;\r
+ return connection;\r
}\r
/**\r
* Returns a JDBC connection to the pool.\r
- * @param c the Connection to return\r
- * @throws SQLException\r
+ * @param connection the Connection to return\r
*/\r
- public void release(Connection c) {\r
- if (c != null) {\r
+ public void release(Connection connection) {\r
+ if (connection != null) {\r
synchronized (queue) {\r
- if (!queue.contains(c))\r
- queue.add(c);\r
+ if (!queue.contains(connection))\r
+ queue.add(connection);\r
}\r
}\r
}\r
* @return true if all retrofits worked, false otherwise\r
*/\r
public boolean runRetroFits() {\r
- return retroFit1()\r
- && retroFit2()\r
- && retroFit3()\r
- && retroFit4()\r
- && retroFit5()\r
- && retroFit6()\r
- && retroFit7()\r
- && retroFit8()\r
- && retroFit9() //New retroFit call to add CREATED_DATE column Rally:US674199 - 1610\r
- && retroFit10() //New retroFit call to add BUSINESS_DESCRIPTION column Rally:US708102 - 1610\r
- && retroFit11() //New retroFit call for groups feature Rally:US708115 - 1610 \r
- ;\r
+ return retroFit1();\r
}\r
+\r
/**\r
- * Retrofit 1 - Make sure the expected tables are in MySQL and are initialized.\r
- * Uses mysql_init_0000 and mysql_init_0001 to setup the DB.\r
+ * Retrofit 1 - Make sure the expected tables are in DB and are initialized.\r
+ * Uses sql_init_01.sql to set up the DB.
* @return true if the retrofit worked, false otherwise\r
*/\r
private boolean retroFit1() {\r
- final String[] expected_tables = {\r
- "FEEDS", "FEED_ENDPOINT_ADDRS", "FEED_ENDPOINT_IDS", "PARAMETERS", "SUBSCRIPTIONS"\r
+ final String[] expectedTables = {\r
+ "FEEDS", "FEED_ENDPOINT_ADDRS", "FEED_ENDPOINT_IDS", "PARAMETERS",\r
+ "SUBSCRIPTIONS", "LOG_RECORDS", "INGRESS_ROUTES", "EGRESS_ROUTES",\r
+ "NETWORK_ROUTES", "NODESETS", "NODES", "GROUPS"\r
};\r
- Connection c = null;\r
+ Connection connection = null;\r
try {\r
- c = getConnection();\r
- Set<String> tables = getTableSet(c);\r
+ connection = getConnection();\r
+ Set<String> actualTables = getTableSet(connection);\r
boolean initialize = false;\r
- for (String s : expected_tables) {\r
- initialize |= !tables.contains(s);\r
+ for (String table : expectedTables) {\r
+ initialize |= !actualTables.contains(table);\r
}\r
if (initialize) {\r
intlogger.info("PROV9001: First time startup; The database is being initialized.");\r
- runInitScript(c, 0); // script 0 creates the provisioning tables\r
- runInitScript(c, 1); // script 1 initializes PARAMETERS\r
- }\r
- } catch (SQLException e) {\r
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());\r
- return false;\r
- } finally {\r
- if (c != null)\r
- release(c);\r
- }\r
- return true;\r
- }\r
- /**\r
- * Retrofit 2 - if the LOG_RECORDS table is missing, add it.\r
- * Uses mysql_init_0002 to create this table.\r
- * @return true if the retrofit worked, false otherwise\r
- */\r
- private boolean retroFit2() {\r
- Connection c = null;\r
- try {\r
- // If LOG_RECORDS table is missing, add it\r
- c = getConnection();\r
- Set<String> tables = getTableSet(c);\r
- if (!tables.contains("LOG_RECORDS")) {\r
- intlogger.info("PROV9002: Creating LOG_RECORDS table.");\r
- runInitScript(c, 2); // script 2 creates the LOG_RECORDS table\r
- }\r
- } catch (SQLException e) {\r
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());\r
- return false;\r
- } finally {\r
- if (c != null)\r
- release(c);\r
- }\r
- return true;\r
- }\r
- /**\r
- * Retrofit 3 - if the FEEDS_UNIQUEID table (from release 1.0.*) exists, drop it.\r
- * If SUBSCRIPTIONS.SUBID still has the auto_increment attribute, remove it.\r
- * @return true if the retrofit worked, false otherwise\r
- */\r
- @SuppressWarnings("resource")\r
- private boolean retroFit3() {\r
- Connection c = null;\r
- try {\r
- // if SUBSCRIPTIONS.SUBID still has auto_increment, remove it\r
- boolean doremove = false;\r
- c = getConnection();\r
- DatabaseMetaData md = c.getMetaData();\r
- ResultSet rs = md.getColumns("datarouter", "", "SUBSCRIPTIONS", "SUBID");\r
- if (rs != null) {\r
- while (rs.next()) {\r
- doremove = rs.getString("IS_AUTOINCREMENT").equals("YES");\r
- }\r
- rs.close();\r
- rs = null;\r
- }\r
- if (doremove) {\r
- intlogger.info("PROV9002: Modifying SUBSCRIPTIONS SUBID column to remove auto increment.");\r
- Statement s = c.createStatement();\r
- s.execute("ALTER TABLE SUBSCRIPTIONS MODIFY COLUMN SUBID INT UNSIGNED NOT NULL");\r
- s.close();\r
- }\r
-\r
- // Remove the FEEDS_UNIQUEID table, if it exists\r
- Set<String> tables = getTableSet(c);\r
- if (tables.contains("FEEDS_UNIQUEID")) {\r
- intlogger.info("PROV9002: Dropping FEEDS_UNIQUEID table.");\r
- Statement s = c.createStatement();\r
- s.execute("DROP TABLE FEEDS_UNIQUEID");\r
- s.close();\r
- }\r
- } catch (SQLException e) {\r
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());\r
- return false;\r
- } finally {\r
- if (c != null)\r
- release(c);\r
- }\r
- return true;\r
- }\r
- private long nextid = 0; // used for initial creation of LOG_RECORDS table.\r
- /**\r
- * Retrofit 4 - if old log tables exist (from release 1.0.*), copy them to LOG_RECORDS, then drop them.\r
- * @return true if the retrofit worked, false otherwise\r
- */\r
- @SuppressWarnings("resource")\r
- private boolean retroFit4() {\r
- Connection c = null;\r
- try {\r
- c = getConnection();\r
- Set<String> tables = getTableSet(c);\r
- if (tables.contains("PUBLISH_RECORDS")) {\r
- intlogger.info("PROV9002: Copying PUBLISH_RECORDS to LOG_RECORDS table.");\r
- copyLogTable("PUBLISH_RECORDS", PublishRecord.class);\r
- intlogger.info("PROV9002: Dropping PUBLISH_RECORDS table.");\r
- Statement s = c.createStatement();\r
- s.execute("DROP TABLE PUBLISH_RECORDS");\r
- s.close();\r
- }\r
- if (tables.contains("DELIVERY_RECORDS")) {\r
- intlogger.info("PROV9002: Copying DELIVERY_RECORDS to LOG_RECORDS table.");\r
- copyLogTable("DELIVERY_RECORDS", DeliveryRecord.class);\r
- intlogger.info("PROV9002: Dropping DELIVERY_RECORDS table.");\r
- Statement s = c.createStatement();\r
- s.execute("DROP TABLE DELIVERY_RECORDS");\r
- s.close();\r
- }\r
- if (tables.contains("EXPIRY_RECORDS")) {\r
- intlogger.info("PROV9002: Copying EXPIRY_RECORDS to LOG_RECORDS table.");\r
- copyLogTable("EXPIRY_RECORDS", ExpiryRecord.class);\r
- intlogger.info("PROV9002: Dropping EXPIRY_RECORDS table.");\r
- Statement s = c.createStatement();\r
- s.execute("DROP TABLE EXPIRY_RECORDS");\r
- s.close();\r
- }\r
- } catch (SQLException e) {\r
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());\r
- return false;\r
- } finally {\r
- if (c != null)\r
- release(c);\r
- }\r
- return true;\r
- }\r
- /**\r
- * Retrofit 5 - Create the new routing tables required for Release 2.\r
- * Adds a new "SUSPENDED" column to FEEDS and SUBSCRIPTIONS.\r
- * Modifies the LOG_RECORDS table to handle new R2 records.\r
- * @return true if the retrofit worked, false otherwise\r
- */\r
- @SuppressWarnings("resource")\r
- private boolean retroFit5() {\r
- final String[] expected_tables = {\r
- "INGRESS_ROUTES", "EGRESS_ROUTES", "NETWORK_ROUTES", "NODESETS", "NODES"\r
- };\r
- Connection c = null;\r
- try {\r
- // If expected tables are not present, then add new routing tables\r
- c = getConnection();\r
- Set<String> tables = getTableSet(c);\r
- boolean initialize = false;\r
- for (String s : expected_tables) {\r
- initialize |= !tables.contains(s);\r
- }\r
- if (initialize) {\r
- intlogger.info("PROV9002: Adding routing tables for Release 2.0.");\r
- runInitScript(c, 3); // script 3 creates the routing tables\r
- }\r
-\r
- // Add SUSPENDED column to FEEDS/SUBSCRIPTIONS\r
- DatabaseMetaData md = c.getMetaData();\r
- for (String tbl : new String[] {"FEEDS", "SUBSCRIPTIONS" }) {\r
- boolean add_col = true;\r
- ResultSet rs = md.getColumns("datarouter", "", tbl, "SUSPENDED");\r
- if (rs != null) {\r
- add_col = !rs.next();\r
- rs.close();\r
- rs = null;\r
- }\r
- if (add_col) {\r
- intlogger.info("PROV9002: Adding SUSPENDED column to "+tbl+" table.");\r
- Statement s = c.createStatement();\r
- s.execute("ALTER TABLE "+tbl+" ADD COLUMN SUSPENDED BOOLEAN DEFAULT FALSE");\r
- s.close();\r
- }\r
- }\r
-\r
- // Modify LOG_RECORDS for R2\r
- intlogger.info("PROV9002: Modifying LOG_RECORDS table.");\r
- Statement s = c.createStatement();\r
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN TYPE ENUM('pub', 'del', 'exp', 'pbf', 'dlx') NOT NULL");\r
- s.close();\r
- s = c.createStatement();\r
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN REASON ENUM('notRetryable', 'retriesExhausted', 'diskFull', 'other')");\r
- s.close();\r
- boolean add_col = true;\r
- ResultSet rs = md.getColumns("datarouter", "", "LOG_RECORDS", "CONTENT_LENGTH_2");\r
- if (rs != null) {\r
- add_col = !rs.next();\r
- rs.close();\r
- rs = null;\r
+ runInitScript(connection, 1);\r
}\r
- if (add_col) {\r
- intlogger.info("PROV9002: Fixing two columns in LOG_RECORDS table (this may take some time).");\r
- s = c.createStatement();\r
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN CONTENT_LENGTH BIGINT NOT NULL, ADD COLUMN CONTENT_LENGTH_2 BIGINT AFTER RECORD_ID");\r
- s.close();\r
- }\r
- } catch (SQLException e) {\r
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());\r
- return false;\r
- } finally {\r
- if (c != null)\r
- release(c);\r
- }\r
- return true;\r
- }\r
- /**\r
- * Retrofit 6 - Adjust LOG_RECORDS.USER to be 50 chars (MR #74).\r
- * @return true if the retrofit worked, false otherwise\r
- */\r
- @SuppressWarnings("resource")\r
- private boolean retroFit6() {\r
- Connection c = null;\r
- try {\r
- c = getConnection();\r
- // Modify LOG_RECORDS for R2\r
- intlogger.info("PROV9002: Modifying LOG_RECORDS.USER length.");\r
- Statement s = c.createStatement();\r
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN USER VARCHAR(50)");\r
- s.close();\r
- } catch (SQLException e) {\r
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());\r
- return false;\r
- } finally {\r
- if (c != null)\r
- release(c);\r
- }\r
- return true;\r
- }\r
- /**\r
- * Retrofit 7 - Adjust LOG_RECORDS.FEED_FILEID and LOG_RECORDS.DELIVERY_FILEID to be 256 chars.\r
- * @return true if the retrofit worked, false otherwise\r
- */\r
- @SuppressWarnings("resource")\r
- private boolean retroFit7() {\r
- Connection c = null;\r
- try {\r
- c = getConnection();\r
- // Modify LOG_RECORDS for long (>128) FILEIDs\r
- intlogger.info("PROV9002: Modifying LOG_RECORDS.USER length.");\r
- Statement s = c.createStatement();\r
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN FEED_FILEID VARCHAR(256), MODIFY COLUMN DELIVERY_FILEID VARCHAR(256)");\r
- s.close();\r
} catch (SQLException e) {\r
intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());\r
return false;\r
} finally {\r
- if (c != null)\r
- release(c);\r
+ if (connection != null)\r
+ release(connection);\r
}\r
return true;\r
}\r
- /**\r
- * Retrofit 8 - Adjust FEEDS.NAME to be 255 chars (MR #74).\r
- * @return true if the retrofit worked, false otherwise\r
- */\r
- @SuppressWarnings("resource")\r
- private boolean retroFit8() {\r
- Connection c = null;\r
- try {\r
- c = getConnection();\r
- intlogger.info("PROV9002: Modifying FEEDS.NAME length.");\r
- Statement s = c.createStatement();\r
- s.execute("ALTER TABLE FEEDS MODIFY COLUMN NAME VARCHAR(255)");\r
- s.close();\r
- } catch (SQLException e) {\r
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());\r
- return false;\r
- } finally {\r
- if (c != null)\r
- release(c);\r
- }\r
- return true;\r
- }\r
- \r
- /**\r
- * Retrofit 9 - Add column FEEDS.CREATED_DATE and SUBSCRIPTIONS.CREATED_DATE, 1610 release user story US674199.\r
- * @return true if the retrofit worked, false otherwise\r
- */\r
-\r
- @SuppressWarnings("resource") \r
- private boolean retroFit9() { \r
- Connection c = null; \r
- try { \r
- c = getConnection(); \r
- // Add CREATED_DATE column to FEEDS/SUBSCRIPTIONS tables\r
- DatabaseMetaData md = c.getMetaData(); \r
- for (String tbl : new String[] {"FEEDS", "SUBSCRIPTIONS" }) { \r
- boolean add_col = true; \r
- ResultSet rs = md.getColumns("datarouter", "", tbl, "CREATED_DATE"); \r
- if (rs != null) { \r
- add_col = !rs.next(); \r
- rs.close(); \r
- rs = null; \r
- } \r
- if (add_col) { \r
- intlogger.info("PROV9002: Adding CREATED_DATE column to "+tbl+" table."); \r
- Statement s = c.createStatement();\r
- s.execute("ALTER TABLE "+tbl+" ADD COLUMN CREATED_DATE timestamp DEFAULT CURRENT_TIMESTAMP"); \r
- s.close(); \r
- } \r
- } \r
- } catch (SQLException e) { \r
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage()); \r
- return false; \r
- } finally { \r
- if (c != null) \r
- release(c); \r
- } \r
- return true; \r
- }\r
-\r
- /**\r
- * Retrofit 10 -Adding business BUSINESS_DESCRIPTION to FEEDS table (Rally\r
- * US708102).\r
- * \r
- * @return true if the retrofit worked, false otherwise\r
- */\r
-\r
- @SuppressWarnings("resource")\r
- private boolean retroFit10() {\r
- Connection c = null;\r
- boolean addColumn = true;\r
- \r
- try {\r
-\r
- c = getConnection(); \r
- // Add BUSINESS_DESCRIPTION column to FEEDS table\r
- DatabaseMetaData md = c.getMetaData(); \r
- boolean add_col = true; \r
- ResultSet rs = md.getColumns("datarouter", "", "FEEDS", "BUSINESS_DESCRIPTION"); \r
- if (rs != null) { \r
- add_col = !rs.next(); \r
- rs.close(); \r
- rs = null; \r
- } \r
- if(add_col) {\r
- intlogger\r
- .info("PROV9002: Adding BUSINESS_DESCRIPTION column to FEEDS table.");\r
- Statement s = c.createStatement();\r
- s.execute("ALTER TABLE FEEDS ADD COLUMN BUSINESS_DESCRIPTION varchar(1000) DEFAULT NULL AFTER DESCRIPTION, MODIFY COLUMN DESCRIPTION VARCHAR(1000)");\r
- s.close();\r
- }\r
- }\r
- catch (SQLException e) {\r
- intlogger\r
- .fatal("PROV9000: The database credentials are not working: "\r
- + e.getMessage());\r
- return false;\r
- } finally {\r
- if (c != null)\r
- release(c);\r
- }\r
- return true;\r
- }\r
-\r
-\r
- /*New retroFit method is added for groups feature Rally:US708115 - 1610 \r
- * @retroFit11()\r
- * @parmas: none\r
- * @return - boolean if table and fields are created (Group table, group id in FEEDS, SUBSCRIPTION TABLES)\r
- */\r
- @SuppressWarnings("resource") \r
- private boolean retroFit11() { \r
- final String[] expected_tables = { \r
- "GROUPS" \r
- }; \r
- Connection c = null; \r
- \r
- try { \r
- // If expected tables are not present, then add new routing tables \r
- c = getConnection(); \r
- Set<String> tables = getTableSet(c); \r
- boolean initialize = false; \r
- for (String s : expected_tables) { \r
- initialize |= !tables.contains(s); \r
- } \r
- if (initialize) { \r
- intlogger.info("PROV9002: Adding GROUPS table for Release 1610."); \r
- runInitScript(c, 4); // script 4 creates the routing tables \r
- } \r
- \r
- // Add GROUPID column to FEEDS/SUBSCRIPTIONS \r
- DatabaseMetaData md = c.getMetaData(); \r
- for (String tbl : new String[] {"FEEDS", "SUBSCRIPTIONS" }) { \r
- boolean add_col = true; \r
- ResultSet rs = md.getColumns("datarouter", "", tbl, "GROUPID"); \r
- if (rs != null) { \r
- add_col = !rs.next(); \r
- rs.close(); \r
- rs = null; \r
- } \r
- if (add_col) { \r
- intlogger.info("PROV9002: Adding GROUPID column to "+tbl+" table."); \r
- Statement s = c.createStatement(); \r
- s.execute("ALTER TABLE "+tbl+" ADD COLUMN GROUPID INT(10) UNSIGNED NOT NULL DEFAULT 0 AFTER FEEDID"); \r
- s.close(); \r
- } \r
- } \r
- } catch (SQLException e) { \r
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage()); \r
- return false; \r
- } finally { \r
- if (c != null) \r
- release(c); \r
- } \r
- return true; \r
- }\r
-\r
-\r
- /**\r
- * Copy the log table <i>table_name</i> to LOG_RECORDS;\r
- * @param table_name the name of the old (1.0.*) table to copy\r
- * @param table_class the class used to instantiate a record from the table\r
- * @throws SQLException if there is a problem getting a MySQL connection\r
- */\r
- @SuppressWarnings("resource")\r
- private void copyLogTable(String table_name, Class<? extends Loadable> table_class) throws SQLException {\r
- long start = System.currentTimeMillis();\r
- int n = 0;\r
- Connection c1 = getConnection();\r
- Connection c2 = getConnection();\r
-\r
- try {\r
- Constructor<? extends Loadable> cnst = table_class.getConstructor(ResultSet.class);\r
- PreparedStatement ps = c2.prepareStatement(LogfileLoader.INSERT_SQL);\r
- Statement stmt = c1.createStatement();\r
- ResultSet rs = stmt.executeQuery("select * from "+table_name);\r
- while (rs.next()) {\r
- Loadable rec = cnst.newInstance(rs);\r
- rec.load(ps);\r
- ps.setLong(18, ++nextid);\r
- ps.executeUpdate();\r
- if ((++n % 10000) == 0)\r
- intlogger.debug(" "+n+" records done.");\r
- }\r
- stmt.close();\r
- ps.close();\r
- } catch (SQLException e) {\r
- e.printStackTrace();\r
- } catch (NoSuchMethodException e) {\r
- e.printStackTrace();\r
- } catch (SecurityException e) {\r
- e.printStackTrace();\r
- } catch (InstantiationException e) {\r
- e.printStackTrace();\r
- } catch (IllegalAccessException e) {\r
- e.printStackTrace();\r
- } catch (IllegalArgumentException e) {\r
- e.printStackTrace();\r
- } catch (InvocationTargetException e) {\r
- e.printStackTrace();\r
- }\r
-\r
- release(c1);\r
- release(c2);\r
- long x = (System.currentTimeMillis() - start);\r
- intlogger.debug(" "+n+" records done in "+x+" ms.");\r
- }\r
\r
/**\r
* Get a set of all table names in the DB.\r
- * @param c a DB connection\r
+ * @param connection a DB connection\r
* @return the set of table names\r
*/\r
- private Set<String> getTableSet(Connection c) {\r
+ private Set<String> getTableSet(Connection connection) {\r
Set<String> tables = new HashSet<String>();\r
try {\r
- DatabaseMetaData md = c.getMetaData();\r
+ DatabaseMetaData md = connection.getMetaData();\r
ResultSet rs = md.getTables("datarouter", "", "", null);\r
if (rs != null) {\r
while (rs.next()) {\r
/**\r
* Initialize the tables by running the initialization scripts located in the directory specified\r
* by the property <i>org.onap.dmaap.datarouter.provserver.dbscripts</i>. Scripts have names of\r
- * the form mysql_init_NNNN.\r
- * @param c a DB connection\r
- * @param n the number of the mysql_init_NNNN script to run\r
+ * the form sql_init_NN.sql\r
+ * @param connection a DB connection\r
+ * @param scriptId the number of the sql_init_NN.sql script to run\r
*/\r
- private void runInitScript(Connection c, int n) {\r
- String scriptdir = (String) props.get("org.onap.dmaap.datarouter.provserver.dbscripts");\r
+ private void runInitScript(Connection connection, int scriptId) {\r
+ String scriptDir = (String) props.get("org.onap.dmaap.datarouter.provserver.dbscripts");\r
StringBuilder sb = new StringBuilder();\r
try {\r
- String scriptfile = String.format("%s/mysql_init_%04d", scriptdir, n);\r
- if (!(new File(scriptfile)).exists())\r
+ String scriptFile = String.format("%s/sql_init_%02d.sql", scriptDir, scriptId);\r
+ if (!(new File(scriptFile)).exists())\r
return;\r
\r
- LineNumberReader in = new LineNumberReader(new FileReader(scriptfile));\r
+ LineNumberReader in = new LineNumberReader(new FileReader(scriptFile));\r
String line;\r
while ((line = in.readLine()) != null) {\r
if (!line.startsWith("--")) {\r
// Execute one DDL statement\r
String sql = sb.toString();\r
sb.setLength(0);\r
- Statement s = c.createStatement();\r
+ Statement s = connection.createStatement();\r
s.execute(sql);\r
s.close();\r
}\r
\r
/**\r
* This class provides methods that run in a separate thread, in order to process logfiles uploaded into the spooldir.\r
- * These logfiles are loaded into the MySQL LOG_RECORDS table. In a running provisioning server, there should only be\r
+ * These logfiles are loaded into the MariaDB LOG_RECORDS table. In a running provisioning server, there should only be\r
* two places where records can be loaded into this table; here, and in the method DB.retroFit4() which may be run at\r
* startup to load the old (1.0) style log tables into LOG_RECORDS;\r
* <p>This method maintains an {@link RLEBitSet} which can be used to easily see what records are presently in the\r
}\r
/**\r
* Run continuously to look for new logfiles in the spool directory and import them into the DB.\r
- * The spool is checked once per second. If free space on the MySQL filesystem falls below\r
+ * The spool is checked once per second. If free space on the MariaDB filesystem falls below\r
* REQUIRED_FREE_PCT (normally 20%) then the oldest logfile entries are removed and the LOG_RECORDS\r
* table is compacted until free space rises above the threshold.\r
*/\r
db.release(conn);\r
}\r
}\r
-// OLD CODE - commented here for historical purposes\r
-//\r
-// private boolean pruneRecordsOldAlgorithm() {\r
-// // Determine space available -- available space must be at least 20% under /opt/app/mysql\r
-// int pct = getFreePercentage();\r
-// boolean did1 = false;\r
-// while (pct < REQUIRED_FREE_PCT) {\r
-// logger.info("PROV8008: Free space is " + pct + "% - removing old log entries");\r
-// boolean didit = removeOldestEntries();\r
-// pct = didit ? getFreePercentage() : 100; // don't loop endlessly\r
-// did1 |= didit;\r
-// }\r
-// return did1;\r
-// }\r
-// private int getFreePercentage() {\r
-// FileSystem fs = (Paths.get("/opt/app/mysql")).getFileSystem();\r
-// long total = 0;\r
-// long avail = 0;\r
-// try {\r
-// for (FileStore store : fs.getFileStores()) {\r
-// total += store.getTotalSpace();\r
-// avail += store.getUsableSpace();\r
-// }\r
-// } catch (IOException e) {\r
-// }\r
-// try { fs.close(); } catch (Exception e) { }\r
-// return (int)((avail * 100) / total);\r
-// }\r
-// private boolean removeOldestEntries() {\r
-// // Remove the last days worth of entries\r
-// Connection conn = null;\r
-// try {\r
-// conn = db.getConnection();\r
-// Statement stmt = conn.createStatement();\r
-// ResultSet rs = stmt.executeQuery("select min(event_time) as MIN from LOG_RECORDS");\r
-// if (rs != null) {\r
-// if (rs.next()) {\r
-// // Compute the end of the first day of logs\r
-// long first = rs.getLong("MIN");\r
-// Calendar cal = new GregorianCalendar();\r
-// cal.setTime(new Date(first));\r
-// cal.add(Calendar.DAY_OF_YEAR, 1);\r
-// cal.set(Calendar.HOUR_OF_DAY, 0);\r
-// cal.set(Calendar.MINUTE, 0);\r
-// cal.set(Calendar.SECOND, 0);\r
-// cal.set(Calendar.MILLISECOND, 0);\r
-// if (!stmt.execute("delete from LOG_RECORDS where event_time < " + cal.getTimeInMillis())) {\r
-// int count = stmt.getUpdateCount();\r
-// logger.info("PROV0009: Removed "+count+" old log entries.");\r
-// stmt.execute("OPTIMIZE TABLE LOG_RECORDS");\r
-// }\r
-// rs.close();\r
-// stmt.close();\r
-// return true;\r
-// }\r
-// rs.close();\r
-// }\r
-// stmt.close();\r
-// } catch (SQLException e) {\r
-// System.err.println(e);\r
-// e.printStackTrace();\r
-// } finally {\r
-// db.release(conn);\r
-// }\r
-// return false;\r
-// }\r
+\r
@SuppressWarnings("resource")\r
private int[] process(File f) {\r
int ok = 0, total = 0;\r
# *\r
#-------------------------------------------------------------------------------\r
version: '2.1'\r
-services: \r
+services:\r
datarouter-prov:\r
- image: onap/dmaap/datarouter-prov\r
+ image: nexus3.onap.org:10003/onap/dmaap/datarouter-prov\r
container_name: datarouter-prov\r
hostname: prov.datarouternew.com\r
ports:\r
- "8443:8443"\r
- "8080:8080" \r
-# volumes:\r
-# - ./prov_data/proserver.properties:/opt/app/datartr/etc/proserver.properties\r
+ volumes:\r
+ - ./prov_data/proserver.properties:/opt/app/datartr/etc/proserver.properties\r
# - ./prov_data/datarouter-prov-jar-with-dependencies.jar:/opt/app/datartr/lib/datarouter-prov-jar-with-dependencies.jar\r
-# - ./prov_data/addSubscriber.txt:/opt/app/datartr/addSubscriber.txt\r
-# - ./prov_data/addFeed3.txt:/opt/app/datartr/addFeed3.txt\r
- entrypoint: ["bash", "-c", "sleep 10; /bin/sh -c ./startup.sh"]\r
+ - ./prov_data/addSubscriber.txt:/opt/app/datartr/addSubscriber.txt\r
+ - ./prov_data/addFeed3.txt:/opt/app/datartr/addFeed3.txt\r
+ entrypoint: ["bash", "-c", "./startup.sh"]\r
depends_on:\r
- mysql_container:\r
+ mariadb_container:\r
condition: service_healthy\r
+ healthcheck:\r
+ test: ["CMD", "curl", "-f", "http://prov.datarouternew.com:8080/internal/prov"]\r
+ interval: 10s\r
+ timeout: 10s\r
+ retries: 5\r
extra_hosts:\r
- - "node.datarouternew.com:172.18.0.4"\r
- \r
+ - "node.datarouternew.com:172.100.0.4"\r
+ networks:\r
+ testing_net:\r
+ ipv4_address: 172.100.0.3\r
+\r
datarouter-node:\r
- image: onap/dmaap/datarouter-node\r
+ image: nexus3.onap.org:10003/onap/dmaap/datarouter-node\r
container_name: datarouter-node\r
hostname: node.datarouternew.com\r
ports:\r
- "9443:8443"\r
- "9090:8080"\r
-# volumes:\r
-# - ./node_data/node.properties:/opt/app/datartr/etc/node.properties\r
- entrypoint: ["bash", "-c", "sleep 15; /bin/sh -c ./startup.sh"] \r
+ volumes:\r
+ - ./node_data/node.properties:/opt/app/datartr/etc/node.properties\r
+ entrypoint: ["bash", "-c", "./startup.sh"]\r
depends_on:\r
- - datarouter-prov\r
+ datarouter-prov:\r
+ condition: service_healthy\r
extra_hosts:\r
- - "prov.datarouternew.com:172.18.0.3"\r
+ - "prov.datarouternew.com:172.100.0.3"\r
+ networks:\r
+ testing_net:\r
+ ipv4_address: 172.100.0.4\r
\r
- mysql_container:\r
- image: mysql/mysql-server:5.6\r
- container_name: mysql\r
+ mariadb_container:\r
+ image: mariadb:10.2.14\r
+ container_name: mariadb\r
ports:\r
- "3306:3306"\r
environment:\r
- MYSQL_ROOT_PASSWORD: att2017\r
- volumes:\r
- - ./database:/tmp/database\r
- - ./database:/docker-entrypoint-initdb.d\r
- \r
+ MYSQL_ROOT_PASSWORD: datarouter\r
+ MYSQL_DATABASE: datarouter\r
+ MYSQL_USER: datarouter\r
+ MYSQL_PASSWORD: datarouter\r
+ healthcheck:\r
+ test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"]\r
+ interval: 10s\r
+ timeout: 10s\r
+ retries: 5\r
+\r
+ networks:\r
+ testing_net:\r
+ ipv4_address: 172.100.0.2\r
+\r
+networks:\r
+ testing_net:\r
+ driver: bridge\r
+ ipam:\r
+ driver: default\r
+ config:\r
+ - subnet: 172.100.0.0/16\r
"authorization": {\r
"classification": "unclassified",\r
"endpoint_addrs": [\r
- "172.18.0.3",\r
+ "172.100.0.3",\r
],\r
"endpoint_ids": [\r
{\r
"delivery" : \r
\r
{ \r
- "url" : "http://172.18.0.3:7070/", \r
- "user" : "LOGIN", \r
- "password" : "PASSWORD", \r
+ "url" : "http://172.100.0.3:7070/",\r
+ "user" : "datarouter",\r
+ "password" : "datarouter",\r
"use100" : true \r
},\r
"metadataOnly" : false, \r
org.onap.dmaap.datarouter.provserver.truststore.password = changeit\r
org.onap.dmaap.datarouter.provserver.accesslog.dir = /opt/app/datartr/logs\r
org.onap.dmaap.datarouter.provserver.spooldir = /opt/app/datartr/spool\r
-#org.onap.dmaap.datarouter.provserver.dbscripts = /home/eby/dr2/cvs/datarouter/prov/misc/\r
+org.onap.dmaap.datarouter.provserver.dbscripts = /opt/app/datartr/etc/misc\r
org.onap.dmaap.datarouter.provserver.logretention = 30\r
\r
# Database access\r
-org.onap.dmaap.datarouter.db.driver = com.mysql.jdbc.Driver\r
-org.onap.dmaap.datarouter.db.url = jdbc:mysql://172.18.0.2:3306/datarouter\r
+org.onap.dmaap.datarouter.db.driver = org.mariadb.jdbc.Driver\r
+org.onap.dmaap.datarouter.db.url = jdbc:mariadb://172.100.0.2:3306/datarouter\r
org.onap.dmaap.datarouter.db.login = datarouter\r
org.onap.dmaap.datarouter.db.password = datarouter\r
# fi
if [ "`pgrep -u mysql mysqld`" = "" ]
then
- echo MySQL is not running. It must be started before drtrprov
+ echo MariaDB is not running. It must be started before drtrprov
exit 0
fi
PIDS=`pids`
+++ /dev/null
-create database datarouter;
-
-use datarouter;
-
-CREATE TABLE FEEDS (
- FEEDID INT UNSIGNED NOT NULL PRIMARY KEY,
- NAME VARCHAR(20) NOT NULL,
- VERSION VARCHAR(20) NOT NULL,
- DESCRIPTION VARCHAR(256),
- AUTH_CLASS VARCHAR(32) NOT NULL,
- PUBLISHER VARCHAR(8) NOT NULL,
- SELF_LINK VARCHAR(256),
- PUBLISH_LINK VARCHAR(256),
- SUBSCRIBE_LINK VARCHAR(256),
- LOG_LINK VARCHAR(256),
- DELETED BOOLEAN DEFAULT FALSE,
- LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
-CREATE TABLE FEED_ENDPOINT_IDS (
- FEEDID INT UNSIGNED NOT NULL,
- USERID VARCHAR(20) NOT NULL,
- PASSWORD VARCHAR(32) NOT NULL
-);
-
-CREATE TABLE FEED_ENDPOINT_ADDRS (
- FEEDID INT UNSIGNED NOT NULL,
- ADDR VARCHAR(44) NOT NULL
-);
-
-CREATE TABLE SUBSCRIPTIONS (
- SUBID INT UNSIGNED NOT NULL PRIMARY KEY,
- FEEDID INT UNSIGNED NOT NULL,
- DELIVERY_URL VARCHAR(256),
- DELIVERY_USER VARCHAR(20),
- DELIVERY_PASSWORD VARCHAR(32),
- DELIVERY_USE100 BOOLEAN DEFAULT FALSE,
- METADATA_ONLY BOOLEAN DEFAULT FALSE,
- SUBSCRIBER VARCHAR(8) NOT NULL,
- SELF_LINK VARCHAR(256),
- LOG_LINK VARCHAR(256),
- LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
-CREATE TABLE PARAMETERS (
- KEYNAME VARCHAR(32) NOT NULL PRIMARY KEY,
- VALUE VARCHAR(4096) NOT NULL
-);
-
-CREATE TABLE LOG_RECORDS (
- TYPE ENUM('pub', 'del', 'exp') NOT NULL,
- EVENT_TIME BIGINT NOT NULL, /* time of the publish request */
- PUBLISH_ID VARCHAR(64) NOT NULL, /* unique ID assigned to this publish attempt */
- FEEDID INT UNSIGNED NOT NULL, /* pointer to feed in FEEDS */
- REQURI VARCHAR(256) NOT NULL, /* request URI */
- METHOD ENUM('DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'TRACE') NOT NULL, /* HTTP method */
- CONTENT_TYPE VARCHAR(256) NOT NULL, /* content type of published file */
- CONTENT_LENGTH BIGINT UNSIGNED NOT NULL, /* content length of published file */
-
- FEED_FILEID VARCHAR(128), /* file ID of published file */
- REMOTE_ADDR VARCHAR(40), /* IP address of publishing endpoint */
- USER VARCHAR(20), /* user name of publishing endpoint */
- STATUS SMALLINT, /* status code returned to delivering agent */
-
- DELIVERY_SUBID INT UNSIGNED, /* pointer to subscription in SUBSCRIPTIONS */
- DELIVERY_FILEID VARCHAR(128), /* file ID of file being delivered */
- RESULT SMALLINT, /* result received from subscribing agent */
-
- ATTEMPTS INT, /* deliveries attempted */
- REASON ENUM('notRetryable', 'retriesExhausted'),
-
- RECORD_ID BIGINT UNSIGNED NOT NULL PRIMARY KEY, /* unique ID for this record */
-
- INDEX (FEEDID) USING BTREE,
- INDEX (DELIVERY_SUBID) USING BTREE,
- INDEX (RECORD_ID) USING BTREE
-) ENGINE = MyISAM;
-
-CREATE TABLE INGRESS_ROUTES (
- SEQUENCE INT UNSIGNED NOT NULL,
- FEEDID INT UNSIGNED NOT NULL,
- USERID VARCHAR(20),
- SUBNET VARCHAR(44),
- NODESET INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE EGRESS_ROUTES (
- SUBID INT UNSIGNED NOT NULL PRIMARY KEY,
- NODEID INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE NETWORK_ROUTES (
- FROMNODE INT UNSIGNED NOT NULL,
- TONODE INT UNSIGNED NOT NULL,
- VIANODE INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE NODESETS (
- SETID INT UNSIGNED NOT NULL,
- NODEID INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE NODES (
- NODEID INT UNSIGNED NOT NULL PRIMARY KEY,
- NAME VARCHAR(255) NOT NULL,
- ACTIVE BOOLEAN DEFAULT TRUE
-);
-
-CREATE TABLE GROUPS (
- GROUPID INT UNSIGNED NOT NULL PRIMARY KEY,
- AUTHID VARCHAR(100) NOT NULL,
- NAME VARCHAR(50) NOT NULL,
- DESCRIPTION VARCHAR(255),
- CLASSIFICATION VARCHAR(20) NOT NULL,
- MEMBERS TINYTEXT,
- LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
--- 'PROV_AUTH_ADDRESSES', '192.168.56.1' ipv4 address of provision server
-INSERT INTO PARAMETERS VALUES
- ('ACTIVE_POD', '127.0.0.1'),
- ('PROV_ACTIVE_NAME', '${PROV_ACTIVE_NAME}'),
- ('STANDBY_POD', '${DRTR_PROV_STANDBYPOD}'),
- ('PROV_NAME', 'ALCDTL47TJ6015:6080'),
- ('NODES', '127.0.0.1:8080'),
- ('PROV_DOMAIN', '127.0.0.1'),
- ('DELIVERY_INIT_RETRY_INTERVAL', '10'),
- ('DELIVERY_MAX_AGE', '86400'),
- ('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
- ('DELIVERY_RETRY_RATIO', '2'),
- ('LOGROLL_INTERVAL', '300'),
- ('PROV_AUTH_ADDRESSES', '192.168.56.1'),
- ('PROV_AUTH_SUBJECTS', ''),
- ('PROV_MAXFEED_COUNT', '10000'),
- ('PROV_MAXSUB_COUNT', '100000'),
- ('PROV_REQUIRE_CERT', 'false'),
- ('PROV_REQUIRE_SECURE', 'false'),
- ('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
- ;
\ No newline at end of file
file is specified.
DRTR_PROV_DBLOGIN (default datarouter)
- The login used to access MySQL
+ The login used to access MariaDB
DRTR_PROV_DBPASS (default datarouter)
- The password used to access MySQL
+ The password used to access MariaDB
DRTR_PROV_DBSCRIPTS (default /opt/app/datartr/etc)
The directory containing DB initialization scripts
fi
if [ "`pgrep -u mysql mysqld`" = "" ]
then
- echo MySQL is not running. It must be started before runreports
+ echo MariaDB is not running. It must be started before runreports
exit 1
fi
-CREATE DATABASE IF NOT EXISTS datarouter;
-
-CREATE USER 'datarouter'@'%' IDENTIFIED BY 'datarouter';
-
-GRANT ALL PRIVILEGES ON * . * TO 'datarouter'@'%';
-
use datarouter;
CREATE TABLE FEEDS (
FEEDID INT UNSIGNED NOT NULL PRIMARY KEY,
- NAME VARCHAR(20) NOT NULL,
+ GROUPID INT(10) UNSIGNED NOT NULL DEFAULT 0,
+ NAME VARCHAR(255) NOT NULL,
VERSION VARCHAR(20) NOT NULL,
- DESCRIPTION VARCHAR(256),
+ DESCRIPTION VARCHAR(1000),
+ BUSINESS_DESCRIPTION VARCHAR(1000) DEFAULT NULL,
AUTH_CLASS VARCHAR(32) NOT NULL,
PUBLISHER VARCHAR(8) NOT NULL,
SELF_LINK VARCHAR(256),
SUBSCRIBE_LINK VARCHAR(256),
LOG_LINK VARCHAR(256),
DELETED BOOLEAN DEFAULT FALSE,
- LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ SUSPENDED BOOLEAN DEFAULT FALSE,
+ CREATED_DATE TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE FEED_ENDPOINT_IDS (
CREATE TABLE SUBSCRIPTIONS (
SUBID INT UNSIGNED NOT NULL PRIMARY KEY,
FEEDID INT UNSIGNED NOT NULL,
+ GROUPID INT(10) UNSIGNED NOT NULL DEFAULT 0,
DELIVERY_URL VARCHAR(256),
DELIVERY_USER VARCHAR(20),
DELIVERY_PASSWORD VARCHAR(32),
SUBSCRIBER VARCHAR(8) NOT NULL,
SELF_LINK VARCHAR(256),
LOG_LINK VARCHAR(256),
- LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ SUSPENDED BOOLEAN DEFAULT FALSE,
+ CREATED_DATE TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+
);
CREATE TABLE PARAMETERS (
);
CREATE TABLE LOG_RECORDS (
- TYPE ENUM('pub', 'del', 'exp') NOT NULL,
+ TYPE ENUM('pub', 'del', 'exp', 'pbf', 'dlx') NOT NULL,
EVENT_TIME BIGINT NOT NULL, /* time of the publish request */
PUBLISH_ID VARCHAR(64) NOT NULL, /* unique ID assigned to this publish attempt */
FEEDID INT UNSIGNED NOT NULL, /* pointer to feed in FEEDS */
REQURI VARCHAR(256) NOT NULL, /* request URI */
METHOD ENUM('DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'TRACE') NOT NULL, /* HTTP method */
CONTENT_TYPE VARCHAR(256) NOT NULL, /* content type of published file */
- CONTENT_LENGTH BIGINT UNSIGNED NOT NULL, /* content length of published file */
+ CONTENT_LENGTH BIGINT NOT NULL, /* content length of published file */
- FEED_FILEID VARCHAR(128), /* file ID of published file */
- REMOTE_ADDR VARCHAR(40), /* IP address of publishing endpoint */
- USER VARCHAR(20), /* user name of publishing endpoint */
- STATUS SMALLINT, /* status code returned to delivering agent */
+ FEED_FILEID VARCHAR(256), /* file ID of published file */
+ REMOTE_ADDR VARCHAR(40), /* IP address of publishing endpoint */
+ USER VARCHAR(50), /* user name of publishing endpoint */
+ STATUS SMALLINT, /* status code returned to delivering agent */
- DELIVERY_SUBID INT UNSIGNED, /* pointer to subscription in SUBSCRIPTIONS */
- DELIVERY_FILEID VARCHAR(128), /* file ID of file being delivered */
- RESULT SMALLINT, /* result received from subscribing agent */
+ DELIVERY_SUBID INT UNSIGNED, /* pointer to subscription in SUBSCRIPTIONS */
+ DELIVERY_FILEID VARCHAR(256), /* file ID of file being delivered */
+ RESULT SMALLINT, /* result received from subscribing agent */
- ATTEMPTS INT, /* deliveries attempted */
- REASON ENUM('notRetryable', 'retriesExhausted'),
+ ATTEMPTS INT, /* deliveries attempted */
+ REASON ENUM('notRetryable', 'retriesExhausted', 'diskFull', 'other'),
RECORD_ID BIGINT UNSIGNED NOT NULL PRIMARY KEY, /* unique ID for this record */
+ CONTENT_LENGTH_2 BIGINT,
INDEX (FEEDID) USING BTREE,
INDEX (DELIVERY_SUBID) USING BTREE,
LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
--- 'PROV_AUTH_ADDRESSES', '192.168.56.1' ipv4 address of provision server
INSERT INTO PARAMETERS VALUES
- ('ACTIVE_POD', 'prov.datarouternew.com'),
- ('PROV_ACTIVE_NAME', 'prov.datarouternew.com'),
- ('STANDBY_POD', ''),
- ('PROV_NAME', 'prov.datarouternew.com'),
- ('NODES', 'node.datarouternew.com'),
- ('PROV_DOMAIN', 'datarouternew.com'),
- ('DELIVERY_INIT_RETRY_INTERVAL', '10'),
- ('DELIVERY_MAX_AGE', '86400'),
- ('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
- ('DELIVERY_RETRY_RATIO', '2'),
- ('LOGROLL_INTERVAL', '300'),
- ('PROV_AUTH_ADDRESSES', 'prov.datarouternew.com'),
- ('PROV_AUTH_SUBJECTS', ''),
- ('PROV_MAXFEED_COUNT', '10000'),
- ('PROV_MAXSUB_COUNT', '100000'),
- ('PROV_REQUIRE_CERT', 'false'),
- ('PROV_REQUIRE_SECURE', 'false'),
- ('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
- ;
\ No newline at end of file
+ ('ACTIVE_POD', 'prov.datarouternew.com'),
+ ('PROV_ACTIVE_NAME', 'prov.datarouternew.com'),
+ ('STANDBY_POD', ''),
+ ('PROV_NAME', 'prov.datarouternew.com'),
+ ('NODES', '172.100.0.1|node.datarouternew.com'),
+ ('PROV_DOMAIN', 'datarouternew.com'),
+ ('DELIVERY_INIT_RETRY_INTERVAL', '10'),
+ ('DELIVERY_MAX_AGE', '86400'),
+ ('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
+ ('DELIVERY_RETRY_RATIO', '2'),
+ ('LOGROLL_INTERVAL', '300'),
+ ('PROV_AUTH_ADDRESSES', '172.100.0.1|prov.datarouternew.com|node.datarouternew.com'),
+ ('PROV_AUTH_SUBJECTS', ''),
+ ('PROV_MAXFEED_COUNT', '10000'),
+ ('PROV_MAXSUB_COUNT', '100000'),
+ ('PROV_REQUIRE_CERT', 'false'),
+ ('PROV_REQUIRE_SECURE', 'false'),
+ ('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
+ ;
+
+INSERT INTO FEED_ENDPOINT_ADDRS VALUES
+ (1, '172.100.0.1');
\ No newline at end of file
org.onap.dmaap.datarouter.provserver.truststore.password = changeit\r
org.onap.dmaap.datarouter.provserver.accesslog.dir = /opt/app/datartr/logs\r
org.onap.dmaap.datarouter.provserver.spooldir = /opt/app/datartr/spool\r
-#org.onap.dmaap.datarouter.provserver.dbscripts = /home/eby/dr2/cvs/datarouter/prov/misc/\r
+org.onap.dmaap.datarouter.provserver.dbscripts = /opt/app/datartr/etc/misc\r
org.onap.dmaap.datarouter.provserver.logretention = 30\r
\r
# Database access\r
-org.onap.dmaap.datarouter.db.driver = com.mysql.jdbc.Driver\r
-org.onap.dmaap.datarouter.db.url = jdbc:mysql://172.18.0.2:3306/datarouter\r
+org.onap.dmaap.datarouter.db.driver = org.mariadb.jdbc.Driver\r
+org.onap.dmaap.datarouter.db.url = jdbc:mariadb://172.100.0.2:3306/datarouter\r
org.onap.dmaap.datarouter.db.login = datarouter\r
org.onap.dmaap.datarouter.db.password = datarouter\r
import java.security.KeyStore;
import java.util.Properties;
+import org.apache.commons.io.FileUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
public class testBase {
/** The properties file to read the DB properties from */
- public static final String CONFIG_FILE = "tests.properties";
+ public static final String CONFIG_FILE = "integration_test.properties";
public Properties props;
protected AbstractHttpClient httpclient;
// shut down the connection manager to ensure
// immediate deallocation of all system resources
httpclient.getConnectionManager().shutdown();
+ FileUtils.deleteDirectory(new File("./unit-test-logs"));
}
protected void ckResponse(HttpResponse response, int expect) {
--- /dev/null
+test.keystore=self_signed/keystore.jks
+test.kspassword=changeit
+test.truststore=self_signed/cacerts.jks
+test.tspassword=changeit
+test.host=https://prov.datarouternew.com:8443
\ No newline at end of file
--- /dev/null
+#-------------------------------------------------------------------------------
+# ============LICENSE_START==================================================
+# * org.onap.dmaap
+# * ===========================================================================
+# * Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+# * ===========================================================================
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# * ============LICENSE_END====================================================
+# *
+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# *
+#-------------------------------------------------------------------------------
+
+
+log4j.rootLogger=debug, eventlog, intlog, pelog
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d %5p [%t] - %m%n
+
+#
+# Logger used for provisioning events
+#
+log4j.logger.org.onap.dmaap.datarouter.provisioning.events=debug, eventlog
+log4j.additivity.org.onap.dmaap.datarouter.provisioning.events=false
+
+log4j.appender.eventlog=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.eventlog.file=./unit-test-logs/provevent.log
+log4j.appender.eventlog.datePattern='.'yyyyMMdd
+log4j.appender.eventlog.append=true
+log4j.appender.eventlog.layout=org.apache.log4j.PatternLayout
+log4j.appender.eventlog.layout.ConversionPattern=%d %-5p [%t] - %m%n
+
+#
+# Logger used for internal provisioning server events
+#
+log4j.logger.org.onap.dmaap.datarouter.provisioning.internal=debug, intlog
+log4j.additivity.org.onap.dmaap.datarouter.provisioning.internal=false
+
+log4j.appender.intlog=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.intlog.file=./unit-test-logs/provint.log
+log4j.appender.intlog.datePattern='.'yyyyMMdd
+log4j.appender.intlog.append=true
+log4j.appender.intlog.layout=org.apache.log4j.PatternLayout
+log4j.appender.intlog.layout.ConversionPattern=%d %-5p [%t] - %m%n
+
+#
+# Logger used for policy engine
+#
+log4j.logger.org.onap.dmaap.datarouter.authz.impl.ProvAuthorizer=debug, pelog
+log4j.additivity.org.onap.dmaap.datarouter.authz.impl.ProvAuthorizer=false
+
+log4j.appender.pelog=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.pelog.file=./unit-test-logs/policyengine.log
+log4j.appender.pelog.datePattern='.'yyyyMMdd
+log4j.appender.pelog.append=true
+log4j.appender.pelog.layout=org.apache.log4j.PatternLayout
+log4j.appender.pelog.layout.ConversionPattern=%d %-5p [%t] - %m%n