[AAF-21] Initial code import
[aaf/authz.git] / authz-cass / src / main / java / com / att / dao / Cached.java
diff --git a/authz-cass/src/main/java/com/att/dao/Cached.java b/authz-cass/src/main/java/com/att/dao/Cached.java
new file mode 100644 (file)
index 0000000..2cdd2b2
--- /dev/null
@@ -0,0 +1,198 @@
+/*******************************************************************************\r
+ * ============LICENSE_START====================================================\r
+ * * org.onap.aai\r
+ * * ===========================================================================\r
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
+ * * Copyright © 2017 Amdocs\r
+ * * ===========================================================================\r
+ * * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * * you may not use this file except in compliance with the License.\r
+ * * You may obtain a copy of the License at\r
+ * * \r
+ *  *      http://www.apache.org/licenses/LICENSE-2.0\r
+ * * \r
+ *  * Unless required by applicable law or agreed to in writing, software\r
+ * * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * * See the License for the specific language governing permissions and\r
+ * * limitations under the License.\r
+ * * ============LICENSE_END====================================================\r
+ * *\r
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
+ * *\r
+ ******************************************************************************/\r
+package com.att.dao;\r
+\r
+import java.util.Date;\r
+import java.util.List;\r
+import java.util.Map;\r
+import java.util.Timer;\r
+import java.util.TimerTask;\r
+\r
+import com.att.authz.env.AuthzEnv;\r
+import com.att.authz.env.AuthzTrans;\r
+import com.att.authz.layer.Result;\r
+import com.att.cache.Cache;\r
+import com.att.dao.aaf.cass.Status;\r
+import com.att.inno.env.Env;\r
+import com.att.inno.env.Trans;\r
+\r
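+/**\r
+ * Cached adds a set of named segments on top of the base Cache.  Each segment is a Map of Dated\r
+ * entries obtained from the Cache; a key is hashed to a segment, the cached entry's timestamp is\r
+ * compared with the segment's DB timestamp (kept current through the CIDAO), and stale or missing\r
+ * entries are reloaded through the supplied Getter.  Static helpers hook the segments into the\r
+ * cleansing and info-refresh Timers.\r
+ */\r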
+public class Cached<TRANS extends Trans, DATA extends Cacheable> extends Cache<TRANS,DATA> {\r
+       // Java does not allow creation of Arrays with Generics in them...\r
+       // private Map<String,Dated> cache[];\r
+       protected final CIDAO<TRANS> info;\r
+       \r
+       private static Timer infoTimer;\r
+       private Object[] cache;\r
+       public final int segSize;\r
+\r
+       protected final String name;\r
+       \r
+\r
+\r
+       // Same algorithm as String.hashCode(), coded locally so results stay consistent across Java versions.\r
+       // The sign bit is masked off so the segment index is never negative (simple negation would fail for Integer.MIN_VALUE).\r
+       public int cacheIdx(String key) {\r
+               int h = 0;\r
+               for (int i = 0; i < key.length(); i++) {\r
+                   h = 31*h + key.charAt(i);\r
+               }\r
+               return (h & Integer.MAX_VALUE) % segSize;\r
+       }\r
+       \r
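+       /**\r
+        * Build the segment array, obtaining one named Map per segment from the underlying Cache.\r
+        */\r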
+       public Cached(CIDAO<TRANS> info, String name, int segSize) {\r
+               this.name = name;\r
+               this.segSize = segSize;\r
+               this.info = info;\r
+               cache = new Object[segSize];\r
+               // Create a new Map for each Segment, and store locally\r
+               for(int i=0;i<segSize;++i) {\r
+                       cache[i]=obtain(name+i);\r
+               }\r
+       }\r
+       \r
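+       /**\r
+        * Store the list under the given key, wrapped in a Dated entry, in the key's segment.\r
+        */\r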
+       public void add(String key, List<DATA> data) {\r
+               @SuppressWarnings("unchecked")\r
+               Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx(key)]);\r
+               map.put(key, new Dated(data));\r
+       }\r
+\r
+\r
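+       /**\r
+        * Invalidate the segment containing the given key; returns that segment's index.\r
+        */\r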
+       public int invalidate(String key) {\r
+               int cacheIdx = cacheIdx(key);\r
+               @SuppressWarnings("unchecked")\r
+               Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx]);\r
+               // Removing just the one key did not reliably clear stale data, so the whole segment is cleared\r
+               if(map!=null) {\r
+                       map.clear();\r
+               }\r
+               return cacheIdx;\r
+       }\r
+\r
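+       /**\r
+        * Clear a single segment by index; returns an error Result if the index is out of range.\r
+        */\r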
+       public Result<Void> invalidate(int segment)  {\r
+               if(segment<0 || segment>=cache.length) return Result.err(Status.ERR_BadData,"Cache Segment %s is out of range",Integer.toString(segment));\r
+               @SuppressWarnings("unchecked")\r
+               Map<String,Dated> map = ((Map<String,Dated>)cache[segment]);\r
+               if(map!=null) {\r
+                       map.clear();\r
+               }\r
+               return Result.ok();\r
+       }\r
+\r
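+       /**\r
+        * Callback used by get(...) to load data from the backing store on a cache miss or stale entry.\r
+        */\r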
+       protected interface Getter<D> {\r
+               Result<List<D>> get();\r
+       }\r
+       \r
+       // TODO utilize Segmented Caches, and fold "get" into "reads"\r
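+       /**\r
+        * Return the cached list for the key if its timestamp is still current against the DB segment stamp;\r
+        * otherwise load it through the Getter and cache any successful result.\r
+        */\r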
+       @SuppressWarnings("unchecked")\r
+       public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {\r
+               List<DATA> ld = null;\r
+               Result<List<DATA>> rld = null;\r
+               \r
+               int cacheIdx = cacheIdx(key);\r
+               Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);\r
+               \r
+               // Check for saved element in cache\r
+               Dated cached = map.get(key);\r
+               // Note: These Segment Timestamps are kept up to date with DB\r
+               Date dbStamp = info.get(trans, name, cacheIdx);\r
+               \r
+               // A cache entry is still valid when its timestamp is the same as or later than the DB segment stamp (hence the "before" check)\r
+               if(cached!=null && dbStamp!=null && dbStamp.before(cached.timestamp)) {\r
+                       ld = (List<DATA>)cached.data;\r
+                       rld = Result.ok(ld);\r
+               } else {\r
+                       rld = getter.get();\r
+                       if(rld.isOK()) { // only successful lookups are stored in the cache\r
+                               map.put(key, new Dated(rld.value));\r
+                       }\r
+                       // (an earlier variant also removed the entry on backend errors; that behavior is intentionally disabled)\r
+               }\r
+               return rld;\r
+       }\r
+\r
+       /**\r
+        * Each Cached object has multiple segments that need cleaning.  Derive the name of each segment\r
+        * and register it with the cleansing thread.\r
+        * @param env\r
+        * @param dao\r
+        */\r
+       public static void startCleansing(AuthzEnv env, CachedDAO<?,?,?> ... dao) {\r
+               for(CachedDAO<?,?,?> d : dao) {  \r
+                       for(int i=0;i<d.segSize;++i) {\r
+                               startCleansing(env, d.table()+i);\r
+                       }\r
+               }\r
+       }\r
+\r
+\r
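+       /**\r
+        * Start (once) the Timer that periodically refreshes CacheInfo from the DB.\r
+        */\r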
+       public static <T extends Trans> void startRefresh(AuthzEnv env, CIDAO<AuthzTrans> cidao) {\r
+               if(infoTimer==null) {\r
+                       infoTimer = new Timer("CachedDAO Info Refresh Timer");\r
+                       int minRefresh = 10*60*1000; // 10 minute minimum refresh interval\r
+                       infoTimer.schedule(new Refresh(env, cidao, minRefresh), 1000, minRefresh); // note: Refresh from DB immediately\r
+               }\r
+       }\r
+       \r
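+       /**\r
+        * Stop the base Cache timer and cancel the info refresh Timer, if running.\r
+        */\r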
+       public static void stopTimer() {\r
+               Cache.stopTimer();\r
+               if(infoTimer!=null) {\r
+                       infoTimer.cancel();\r
+                       infoTimer = null;\r
+               }\r
+       }\r
+       \r
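+       /**\r
+        * TimerTask that calls CIDAO.check(...) to pull current cache info from the DB, throttled by the\r
+        * transaction rate and bounded by minRefresh and maxRefresh.\r
+        */\r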
+       private final static class Refresh extends TimerTask {\r
+               private static final int maxRefresh = 20*60*1000; // 20 minute maximum refresh interval\r
+               private AuthzEnv env;\r
+               private CIDAO<AuthzTrans> cidao;\r
+               private int minRefresh;\r
+               private long lastRun;\r
+               \r
+               public Refresh(AuthzEnv env, CIDAO<AuthzTrans> cidao, int minRefresh) {\r
+                       this.env = env;\r
+                       this.cidao = cidao;\r
+                       this.minRefresh = minRefresh;\r
+                       lastRun = System.currentTimeMillis()-maxRefresh-1000;\r
+               }\r
+               \r
+               @Override\r
+               public void run() {\r
+                       // Evaluate whether to refresh based on transaction rate\r
+                       long now = System.currentTimeMillis();\r
+                       long interval = now-lastRun;\r
+\r
+                       if(interval < minRefresh || interval < Math.min(env.transRate(),maxRefresh)) return;\r
+                       lastRun = now;\r
+                       AuthzTrans trans = env.newTransNoAvg();\r
+                       Result<Void> rv = cidao.check(trans);\r
+                       if(rv.status!=Result.OK) {\r
+                               env.error().log("Error in CacheInfo Refresh",rv.details);\r
+                       }\r
+                       if(env.debug().isLoggable()) {\r
+                               StringBuilder sb = new StringBuilder("Cache Info Refresh: ");\r
+                               trans.auditTrail(0, sb, Env.REMOTE);\r
+                               env.debug().log(sb);\r
+                       }\r
+               }\r
+       }\r
+}\r