Update project structure to org.onap.aaf
diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java
new file mode 100644 (file)
index 0000000..e72c774
--- /dev/null
@@ -0,0 +1,237 @@
+/*******************************************************************************\r
+ * ============LICENSE_START====================================================\r
+ * * org.onap.aaf\r
+ * * ===========================================================================\r
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
+ * * ===========================================================================\r
+ * * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * * you may not use this file except in compliance with the License.\r
+ * * You may obtain a copy of the License at\r
+ * * \r
+ *  *      http://www.apache.org/licenses/LICENSE-2.0\r
+ * * \r
+ *  * Unless required by applicable law or agreed to in writing, software\r
+ * * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * * See the License for the specific language governing permissions and\r
+ * * limitations under the License.\r
+ * * ============LICENSE_END====================================================\r
+ * *\r
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
+ * *\r
+ ******************************************************************************/\r
+package org.onap.aaf.dao.aaf.cass;\r
+\r
+import java.nio.ByteBuffer;\r
+import java.text.SimpleDateFormat;\r
+import java.util.Date;\r
+import java.util.List;\r
+import java.util.UUID;\r
+\r
+import org.onap.aaf.authz.env.AuthzTrans;\r
+import org.onap.aaf.authz.layer.Result;\r
+import org.onap.aaf.dao.AbsCassDAO;\r
+import org.onap.aaf.dao.CassDAOImpl;\r
+import org.onap.aaf.dao.Loader;\r
+\r
+import com.datastax.driver.core.Cluster;\r
+import com.datastax.driver.core.ConsistencyLevel;\r
+import com.datastax.driver.core.ResultSet;\r
+import com.datastax.driver.core.Row;\r
+\r
+/**\r
+ * History\r
+ * \r
+ * \r
+ * History is a special case, because we don't want Updates or Deletes; they are too likely to mess up history.\r
+ * \r
+ * 9-9-2013 - Found a problem with using "Prepare".  You cannot prepare anything with a "now()" in it, as\r
+ * it is evaluated once during the prepare, and kept.  That renders any use of "now()" pointless.  Therefore\r
+ * the Create function needs to be run fresh every time.\r
+ * \r
+ * Fixed in Cassandra 1.2.6 https://issues.apache.org/jira/browse/CASSANDRA-5616\r
+ *\r
+ */\r
+public class HistoryDAO extends CassDAOImpl<AuthzTrans, HistoryDAO.Data> {\r
+       private static final String TABLE = "history";\r
+\r
+       public static final SimpleDateFormat monthFormat = new SimpleDateFormat("yyyyMM");\r
+//     private static final SimpleDateFormat dayTimeFormat = new SimpleDateFormat("ddHHmmss");\r
+\r
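+       // Helper strings (e.g. the comma-separated field list used in the SELECT/INSERT statements below), returned by setCRUD() in init()\r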
+       private String[] helpers;\r
+\r
+       private HistLoader defLoader;\r
+\r
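+       // Prepared SELECTs for the user, subject/target, and yr_mon lookups, built in init()\r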
+       private AbsCassDAO<AuthzTrans, Data>.PSInfo readByUser, readBySubject, readByYRMN;\r
+\r
+       public HistoryDAO(AuthzTrans trans, Cluster cluster, String keyspace) {\r
+               super(trans, HistoryDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);\r
+               init(trans);\r
+       }\r
+\r
+       public HistoryDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {\r
+               super(trans, HistoryDAO.class.getSimpleName(),aDao,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);\r
+               init(trans);\r
+       }\r
+\r
+\r
+       private static final int KEYLIMIT = 1;\r
+       public static class Data {\r
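+               // One row of the history table; 'id' is filled by now() in the INSERT (see init() below)\r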
+               public UUID id;\r
+               public int      yr_mon;\r
+               public String user;\r
+               public String action;\r
+               public String target;\r
+               public String subject;\r
+               public String  memo;\r
+//             Map<String, String>  detail = null;\r
+//             public Map<String, String>  detail() {\r
+//                     if(detail == null) {\r
+//                             detail = new HashMap<String, String>();\r
+//                     }\r
+//                     return detail;\r
+//             }\r
+               public ByteBuffer reconstruct;\r
+       }\r
+       \r
+       private static class HistLoader extends Loader<Data> {\r
+               public HistLoader(int keylimit) {\r
+                       super(keylimit);\r
+               }\r
+\r
+               @Override\r
+               public Data load(Data data, Row row) {\r
+                       data.id = row.getUUID(0);\r
+                       data.yr_mon = row.getInt(1);\r
+                       data.user = row.getString(2);\r
+                       data.action = row.getString(3);\r
+                       data.target = row.getString(4);\r
+                       data.subject = row.getString(5);\r
+                       data.memo = row.getString(6);\r
+//                     data.detail = row.getMap(6, String.class, String.class);\r
+                       data.reconstruct = row.getBytes(7);\r
+                       return data;\r
+               }\r
+\r
+               @Override\r
+               protected void key(Data data, int idx, Object[] obj) {\r
+                       obj[idx]=data.id;\r
+               }\r
+\r
+               @Override\r
+               protected void body(Data data, int _idx, Object[] obj) {\r
+                       int idx = _idx;\r
+                       obj[idx]=data.yr_mon;\r
+                       obj[++idx]=data.user;\r
+                       obj[++idx]=data.action;\r
+                       obj[++idx]=data.target;\r
+                       obj[++idx]=data.subject;\r
+                       obj[++idx]=data.memo;\r
+//                     obj[++idx]=data.detail;\r
+                       obj[++idx]=data.reconstruct;            \r
+               }\r
+       };\r
+       \r
+       private void init(AuthzTrans trans) {\r
+               // Loader must match field order\r
+               defLoader = new HistLoader(KEYLIMIT);\r
+               helpers = setCRUD(trans, TABLE, Data.class, defLoader);\r
+\r
+               // Need a specialty Creator to handle the "now()"\r
+               // 9/9/2013 - jg - Just great... now() is evaluated once on the Client side, invalidating usage (what is the point of a now() from a long time in the past?)\r
+               // Unless this is fixed, we're putting in a non-prepared statement\r
+               // Solved in Cassandra.  Make sure you are running Cassandra 1.2.6 or later: https://issues.apache.org/jira/browse/CASSANDRA-5616\r
+               replace(CRUD.create, new PSInfo(trans, "INSERT INTO history (" +  helpers[FIELD_COMMAS] +\r
+                                       ") VALUES(now(),?,?,?,?,?,?,?)", \r
+                                       new HistLoader(0) {\r
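+                                               // key() is left empty: 'id' is not bound here because now() in the INSERT supplies it\r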
+                                               @Override\r
+                                               protected void key(Data data, int idx, Object[] obj) {\r
+                                               }\r
+                                       },writeConsistency)\r
+                               );\r
+//             disable(CRUD.Create);\r
+               \r
+               replace(CRUD.read, new PSInfo(trans, SELECT_SP +  helpers[FIELD_COMMAS] +\r
+                               " FROM history WHERE id = ?", defLoader,readConsistency) \r
+//                             new HistLoader(2) {\r
+//                                     @Override\r
+//                                     protected void key(Data data, int idx, Object[] obj) {\r
+//                                             obj[idx]=data.yr_mon;\r
+//                                             obj[++idx]=data.id;\r
+//                                     }\r
+//                             })\r
+                       );\r
+               disable(CRUD.update);\r
+               disable(CRUD.delete);\r
+               \r
+               readByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + \r
+                               " FROM history WHERE user = ?", defLoader,readConsistency);\r
+               readBySubject = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + \r
+                               " FROM history WHERE subject = ? and target = ? ALLOW FILTERING", defLoader,readConsistency);\r
+               readByYRMN = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + \r
+                               " FROM history WHERE yr_mon = ?", defLoader,readConsistency);\r
+               async(true); //TODO dropping messages with Async\r
+       }\r
+\r
+       public static Data newInitedData() {\r
+               Data data = new Data();\r
+               Date now = new Date();\r
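+               // yr_mon is an int in yyyyMM form, e.g. 201707 for July 2017\r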
+               data.yr_mon = Integer.parseInt(monthFormat.format(now));\r
+               // data.day_time = Integer.parseInt(dayTimeFormat.format(now));\r
+               return data;            \r
+       }\r
+\r
+       public Result<List<Data>> readByYYYYMM(AuthzTrans trans, int yyyymm) {\r
+               Result<ResultSet> rs = readByYRMN.exec(trans, "yr_mon", yyyymm);\r
+               if(rs.notOK()) {\r
+                       return Result.err(rs);\r
+               }\r
+               return extract(defLoader,rs.value,null,dflt);\r
+       }\r
+\r
+       /**\r
+        * Gets the history for a user, limited to the specified months\r
+        * yyyymm - one or more months in yyyyMM format (e.g. 201707)\r
+        **/\r
+       public Result<List<Data>> readByUser(AuthzTrans trans, String user, int ... yyyymm) {\r
+               if(yyyymm.length==0) {\r
+                       return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");\r
+               }\r
+               Result<ResultSet> rs = readByUser.exec(trans, "user", user);\r
+               if(rs.notOK()) {\r
+                       return Result.err(rs);\r
+               }\r
+               return extract(defLoader,rs.value,null,new YYYYMM(yyyymm));\r
+       }\r
+       \r
+       public Result<List<Data>> readBySubject(AuthzTrans trans, String subject, String target, int ... yyyymm) {\r
+               if(yyyymm.length==0) {\r
+                       return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");\r
+               }\r
+               Result<ResultSet> rs = readBySubject.exec(trans, "subject", subject, target);\r
+               if(rs.notOK()) {\r
+                       return Result.err(rs);\r
+               }\r
+               return extract(defLoader,rs.value,null,new YYYYMM(yyyymm));\r
+       }\r
+       \r
+       private class YYYYMM implements Accept<Data> {\r
+               private int[] yyyymm;\r
+               public YYYYMM(int yyyymm[]) {\r
+                       this.yyyymm = yyyymm;\r
+               }\r
+               @Override\r
+               public boolean ok(Data data) {\r
+                       int dym = data.yr_mon;\r
+                       for(int ym:yyyymm) {\r
+                               if(dym==ym) {\r
+                                       return true;\r
+                               }\r
+                       }\r
+                       return false;\r
+               }\r
+               \r
+       };\r
+       \r
+}\r
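
For orientation, here is a minimal usage sketch of the DAO introduced above. It is not part of the commit; the keyspace name, the example field values, and the create(trans, data) call inherited from CassDAOImpl are assumptions for illustration, while newInitedData(), readByYYYYMM() and readByUser() are as defined in the file.

// HistoryDAOSketch.java - illustrative only, not part of the change.
// Assumes an already-configured AuthzTrans and Cluster; the keyspace name "authz" is hypothetical.
import java.util.List;

import org.onap.aaf.authz.env.AuthzTrans;
import org.onap.aaf.authz.layer.Result;
import org.onap.aaf.dao.aaf.cass.HistoryDAO;

import com.datastax.driver.core.Cluster;

public class HistoryDAOSketch {
    static void demo(AuthzTrans trans, Cluster cluster) {
        HistoryDAO histDAO = new HistoryDAO(trans, cluster, "authz");

        // newInitedData() pre-fills yr_mon from the current date (yyyyMM as an int)
        HistoryDAO.Data data = HistoryDAO.newInitedData();
        data.user = "demo_id";
        data.action = "create";
        data.target = "role";
        data.subject = "com.example.demo.admin";
        data.memo = "illustrative history entry";
        histDAO.create(trans, data); // assumed CassDAOImpl entry point; runs the non-prepared INSERT with now()

        // Reads: by month bucket, or by user filtered to the months of interest
        Result<List<HistoryDAO.Data>> byMonth = histDAO.readByYYYYMM(trans, 201707);
        Result<List<HistoryDAO.Data>> byUser = histDAO.readByUser(trans, "demo_id", 201706, 201707);
        if (!byUser.notOK()) {
            for (HistoryDAO.Data d : byUser.value) {
                // inspect d.action, d.target, d.memo, ...
            }
        }
    }
}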