1 /*******************************************************************************
\r
2 * ============LICENSE_START====================================================
\r
4 * * ===========================================================================
\r
5 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
\r
6 * * ===========================================================================
\r
7 * * Licensed under the Apache License, Version 2.0 (the "License");
\r
8 * * you may not use this file except in compliance with the License.
\r
9 * * You may obtain a copy of the License at
\r
11 * * http://www.apache.org/licenses/LICENSE-2.0
\r
13 * * Unless required by applicable law or agreed to in writing, software
\r
14 * * distributed under the License is distributed on an "AS IS" BASIS,
\r
15 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
\r
16 * * See the License for the specific language governing permissions and
\r
17 * * limitations under the License.
\r
18 * * ============LICENSE_END====================================================
\r
20 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
\r
22 ******************************************************************************/
\r
23 package com.att.dao.aaf.cass;
\r
25 import java.io.IOException;
\r
26 import java.net.HttpURLConnection;
\r
27 import java.net.URI;
\r
28 import java.util.Date;
\r
29 import java.util.HashMap;
\r
30 import java.util.HashSet;
\r
31 import java.util.Map;
\r
32 import java.util.Map.Entry;
\r
33 import java.util.concurrent.BlockingQueue;
\r
34 import java.util.concurrent.ConcurrentHashMap;
\r
35 import java.util.concurrent.LinkedBlockingQueue;
\r
36 import java.util.concurrent.TimeUnit;
\r
38 import com.att.authz.env.AuthzEnv;
\r
39 import com.att.authz.env.AuthzTrans;
\r
40 import com.att.authz.layer.Result;
\r
41 import com.att.cadi.CadiException;
\r
42 import com.att.cadi.SecuritySetter;
\r
43 import com.att.cadi.client.Future;
\r
44 import com.att.cadi.client.Rcli;
\r
45 import com.att.cadi.client.Retryable;
\r
46 import com.att.cadi.http.HMangr;
\r
47 import com.att.dao.AbsCassDAO;
\r
48 import com.att.dao.CIDAO;
\r
49 import com.att.dao.CassAccess;
\r
50 import com.att.dao.CassDAOImpl;
\r
51 import com.att.dao.Loader;
\r
52 import com.att.inno.env.APIException;
\r
53 import com.att.inno.env.Env;
\r
54 import com.att.inno.env.TimeTaken;
\r
55 import com.att.inno.env.Trans;
\r
56 import com.datastax.driver.core.BoundStatement;
\r
57 import com.datastax.driver.core.Cluster;
\r
58 import com.datastax.driver.core.ResultSet;
\r
59 import com.datastax.driver.core.Row;
\r
60 import com.datastax.driver.core.exceptions.DriverException;
\r
// DAO over the Cassandra "cache" table. Tracks per-table, per-segment "touched"
// timestamps so AAF instances can detect and propagate cache invalidations.
// NOTE(review): this listing appears line-sampled/corrupted (stray CR lines, leading
// line numbers, missing interior lines); comments describe only the visible statements.
62 public class CacheInfoDAO extends CassDAOImpl<AuthzTrans,CacheInfoDAO.Data> implements CIDAO<AuthzTrans> {
\r
// Backing Cassandra table name used in all CQL built below.
64 private static final String TABLE = "cache";
\r
// Shared in-memory view: table name -> per-segment last-touched Dates.
// ConcurrentHashMap, but the Date[] values themselves are swapped/mutated without
// synchronization (see check()/get()) — NOTE(review): verify cross-thread visibility.
65 public static final Map<String,Date[]> info = new ConcurrentHashMap<String,Date[]>();
\r
// Singleton background notifier thread; created lazily by startUpdate().
67 private static CacheUpdate cacheUpdate;
\r
// Prepared+bound statement built in init() to read all rows of the cache table.
70 private BoundStatement check;
\r
71 // Hold current time stamps from Tables
\r
// DAO construction time; used by get() as the default "touched" value.
72 private final Date startTime;
\r
\r
// Primary constructor: binds this DAO to TABLE ("cache") on the given cluster/keyspace,
// with per-table read/write consistency resolved from the transaction environment.
// NOTE(review): closing braces / any further statements are elided in this listing.
74 public CacheInfoDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
\r
75 super(trans, CacheInfoDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
\r
76 startTime = new Date();
\r
// Secondary constructor: reuses the session of an existing DAO rather than opening
// a new cluster connection; otherwise identical setup.
80 public CacheInfoDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) throws APIException, IOException {
\r
81 super(trans, CacheInfoDAO.class.getSimpleName(),aDao,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
\r
82 startTime = new Date();
\r
\r
87 //////////////////////////////////////////
\r
88 // Data Definition, matches Cassandra DM
\r
89 //////////////////////////////////////////
\r
// Number of primary-key columns (name, seg) — consumed by InfoLoader below.
90 private static final int KEYLIMIT = 2;
\r
// One row of the "cache" table: key = (name, seg), value column = touched.
// NOTE(review): the "seg" field declaration and the constructor body are elided in
// this listing; "data.seg" is referenced by InfoLoader, so the field exists upstream.
93 public static class Data {
\r
98 public Data(String name, int seg) {
\r
// Cache/table name (first key column).
104 public String name;
\r
// Last time this (name, seg) entry was touched (the single value column).
106 public Date touched;
\r
\r
// Maps between Data and Cassandra rows/statement parameters (columns in the fixed
// order: name, seg, touched). NOTE(review): method bodies are partially elided here.
109 private static class InfoLoader extends Loader<Data> {
\r
// Shared default instance, keyed on the 2-column primary key.
110 public static final InfoLoader dflt = new InfoLoader(KEYLIMIT);
\r
112 public InfoLoader(int keylimit) {
\r
// Populate a Data object from a result Row: col 0 = name, 1 = seg, 2 = touched.
117 public Data load(Data data, Row row) {
\r
118 // Int more efficient
\r
119 data.name = row.getString(0);
\r
120 data.seg = row.getInt(1);
\r
121 data.touched = row.getDate(2);
\r
// Write the key columns into the statement-parameter array starting at the given index.
// NOTE(review): the local "idx" initialization from "_idx" is elided in this listing.
126 protected void key(Data data, int _idx, Object[] obj) {
\r
129 obj[idx]=data.name;
\r
130 obj[++idx]=data.seg;
\r
// Write the non-key column (touched) into the statement-parameter array.
134 protected void body(Data data, int idx, Object[] obj) {
\r
135 obj[idx]=data.touched;
\r
\r
// Lazily start the singleton peer-notification thread ("CacheInfo Update Thread").
// Not synchronized — NOTE(review): concurrent first calls could race on cacheUpdate;
// confirm callers invoke this once at startup.
139 public static<T extends Trans> void startUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {
\r
140 if(cacheUpdate==null) {
\r
141 Thread t= new Thread(cacheUpdate = new CacheUpdate(env,hman,ss, ip,port),"CacheInfo Update Thread");
\r
// Signal the notifier thread to exit its polling loop.
// NOTE(review): "go" is a plain boolean (not volatile); the write may not be promptly
// visible to the running thread — verify intended shutdown latency.
147 public static<T extends Trans> void stopUpdate() {
\r
148 if(cacheUpdate!=null) {
\r
149 cacheUpdate.go=false;
\r
\r
// Background thread that drains cache-invalidation notices from notifyDQ and pushes
// DELETE /mgmt/cache/... calls to peer AAF instances.
// NOTE(review): listing is line-sampled; constructor body is mostly elided.
153 private final static class CacheUpdate extends Thread {
\r
// Bounded queue (2000) of pending notifications; fed by touch(), drained by run().
154 public static BlockingQueue<Transfer> notifyDQ = new LinkedBlockingQueue<Transfer>(2000);
\r
// Accept header for the peer DELETE call (Void+json preferred).
156 private static final String VOID_CT="application/Void+json;q=1.0;charset=utf-8;version=2.0,application/json;q=1.0;version=2.0,*/*;q=1.0";
\r
157 private AuthzEnv env;
\r
158 private HMangr hman;
\r
159 private SecuritySetter<HttpURLConnection> ss;
\r
// "ip:port" of THIS instance, so run() can skip notifying itself (see CacheClear.code).
160 private final String authority;
\r
// Loop-continue flag; cleared by stopUpdate(). NOTE(review): not volatile — see stopUpdate.
161 public boolean go = true;
\r
163 public CacheUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {
\r
168 this.authority = ip+':'+port;
\r
\r
// Queue message: which table changed and which segments.
// NOTE(review): the "segs" field declaration and its assignment are elided in this
// listing; "data.segs" is referenced from run(), so the field exists upstream.
171 private static class Transfer {
\r
172 public String table;
\r
174 public Transfer(String table, int[] segs) {
\r
175 this.table = table;
\r
\r
// Retryable HTTP action, applied to every registered peer via hman.all(): issues
// DELETE /mgmt/cache/{type}/{segs} to tell that peer to drop its cached entries.
// NOTE(review): listing is line-sampled; success branch and closing braces are elided.
179 private class CacheClear extends Retryable<Integer> {
\r
// Running count across peers (read by run() for its debug summary).
180 public int total=0;
\r
181 private AuthzTrans trans;
\r
182 private String type;
\r
183 private String segs;
\r
185 public CacheClear(AuthzTrans trans) {
\r
186 this.trans = trans;
\r
// Re-target this action for one gathered (table -> segments) entry before the
// hman.all() sweep; segs becomes the comma-separated list from IntHolder.toString().
189 public void set(Entry<String, IntHolder> es) {
\r
190 type = es.getKey();
\r
191 segs = es.getValue().toString();
\r
// Invoked once per peer client. Skips the local instance by comparing the client's
// authority against this server's own "ip:port".
195 public Integer code(Rcli<?> client) throws APIException, CadiException {
\r
196 URI to = client.getURI();
\r
197 if(!to.getAuthority().equals(authority)) {
\r
198 Future<Void> f = client.delete("/mgmt/cache/"+type+'/'+segs,VOID_CT);
\r
// Wait for the response up to the manager's read timeout; log (don't fail) on error.
199 if(f.get(hman.readTimeout())) {
\r
202 trans.error().log("Error During AAF Peer Notify",f.code(),f.body());
\r
\r
// Accumulates segment numbers for one table. Starts as a raw int[] and upgrades to a
// HashSet on the first merge so duplicates collapse.
// NOTE(review): the "raw" field declaration and several loop bodies are elided in this
// listing; comments cover only the visible statements.
209 private class IntHolder {
\r
// Lazily created on first add(); null until then.
211 HashSet<Integer> set;
\r
213 public IntHolder(int ints[]) {
\r
// Merge more segments: promote the initial raw array plus the new ints into the set.
217 public void add(int[] ints) {
\r
219 set = new HashSet<Integer>();
\r
221 for(int i=0;i<raw.length;++i) {
\r
225 for(int i=0;i<ints.length;++i) {
\r
// Render as a comma-separated segment list — becomes the {segs} path element of the
// peer DELETE URL (see CacheClear.set).
231 public String toString() {
\r
232 StringBuilder sb = new StringBuilder();
\r
233 boolean first = true;
\r
244 for(Integer i : set) {
\r
253 return sb.toString();
\r
\r
// Main notifier loop: block-poll the queue (4s), then drain it completely, coalescing
// Transfers per table into IntHolders, then sweep all peers with one CacheClear per
// table. NOTE(review): the outer while/try structure is heavily elided in this listing;
// nesting below is inferred and should be checked against the full source.
258 public void run() {
\r
// Blocking poll: waits up to 4 seconds for the first notification.
261 Transfer data = notifyDQ.poll(4,TimeUnit.SECONDS);
\r
267 CacheClear cc = null;
\r
268 Map<String,IntHolder> gather = null;
\r
269 AuthzTrans trans = null;
\r
271 // Do a block poll first
\r
274 start = System.nanoTime();
\r
275 trans = env.newTransNoAvg();
\r
276 cc = new CacheClear(trans);
\r
277 gather = new HashMap<String,IntHolder>();
\r
// Coalesce: first Transfer for a table creates an IntHolder; later ones merge segs.
279 IntHolder prev = gather.get(data.table);
\r
281 gather.put(data.table,new IntHolder(data.segs));
\r
283 prev.add(data.segs);
\r
285 // continue while there is data
\r
// Non-blocking drain of everything already queued.
286 } while((data = notifyDQ.poll())!=null);
\r
// One peer sweep per gathered table.
288 for(Entry<String, IntHolder> es : gather.entrySet()) {
\r
291 if(hman.all(ss, cc, false)!=null) {
\r
294 } catch (Exception e) {
\r
295 trans.error().log("Error on Cache Update");
\r
// (kept verbatim below — see original; debug-only timing/summary block)
298 if(env.debug().isLoggable()) {
\r
299 float millis = (System.nanoTime()-start)/1000000f;
\r
300 StringBuilder sb = new StringBuilder("Direct Cache Refresh: ");
\r
301 sb.append("Updated ");
\r
304 sb.append(" entry for ");
\r
306 sb.append(" entries for ");
\r
// Average responses per peer; guarded against divide-by-zero.
308 int peers = count<=0?0:cc.total/count;
\r
310 sb.append(" client");
\r
317 trans.auditTrail(0, sb, Env.REMOTE);
\r
318 env.debug().log(sb);
\r
// NOTE(review): no Thread.currentThread().interrupt() is visible in this catch —
// confirm the elided handler restores interrupt status or exits the loop.
321 } catch (InterruptedException e1) {
\r
\r
// One-time setup: register CRUD helpers for TABLE, prepare the full-table SELECT used
// by check(), and disable create/delete (this DAO only reads and updates "touched").
// NOTE(review): lines between the visible statements are elided in this listing.
328 private void init(AuthzTrans trans) throws APIException, IOException {
\r
330 String[] helpers = setCRUD(trans, TABLE, Data.class, InfoLoader.dflt);
\r
// Prepared once; bound statement reused by every check() call.
331 check = getSession(trans).prepare(SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE).bind();
\r
333 disable(CRUD.create);
\r
334 disable(CRUD.delete);
\r
\r
// Mark (name, segs...) as changed. Two mechanisms run back-to-back:
//  1) direct peer notify — enqueue a Transfer for the CacheUpdate thread (if running);
//  2) table-based (original) — batch CQL UPDATE of touched=now() per segment.
// Returns ok unless the Cassandra write fails.
// NOTE(review): listing is line-sampled; the try blocks and per-seg loop are partially
// elided — nesting inferred.
338 * @see com.att.dao.aaf.cass.CIDAO#touch(com.att.authz.env.AuthzTrans, java.lang.String, int)
\r
342 public Result<Void> touch(AuthzTrans trans, String name, int ... seg) {
\r
344 // Direct Service Cache Invalidation
\r
346 // ConcurrentQueues are open-ended. We don't want any Memory leaks
\r
347 // Note: we keep a separate counter, because "size()" on a Linked Queue is expensive
\r
348 if(cacheUpdate!=null) {
\r
// Bounded offer (2s): if the queue is full, log and drop rather than block the caller.
350 if(!CacheUpdate.notifyDQ.offer(new CacheUpdate.Transfer(name, seg),2,TimeUnit.SECONDS)) {
\r
351 trans.error().log("Cache Notify Queue is not accepting messages, bouncing may be appropriate" );
\r
// NOTE(review): interrupt status restoration not visible in this listing — confirm.
353 } catch (InterruptedException e) {
\r
354 trans.error().log("Cache Notify Queue posting was interrupted" );
\r
359 // Table Based Cache Invalidation (original)
\r
361 // Note: Save time with multiple Sequence Touches, but PreparedStmt doesn't support IN
\r
// "start" is only the TimeTaken label; "sb" accumulates the unprepared BATCH CQL.
362 StringBuilder start = new StringBuilder("CacheInfoDAO Touch segments ");
\r
363 start.append(name);
\r
364 start.append(": ");
\r
365 StringBuilder sb = new StringBuilder("BEGIN BATCH\n");
\r
366 boolean first = true;
\r
// Per-segment UPDATE appended here ("name" comes from trusted internal callers, not
// user input — NOTE(review): confirm, since it is string-concatenated into CQL).
368 sb.append(UPDATE_SP);
\r
370 sb.append(" SET touched=dateof(now()) WHERE name = '");
\r
372 sb.append("' AND seg = ");
\r
382 sb.append("APPLY BATCH;");
\r
383 TimeTaken tt = trans.start(start.toString(),Env.REMOTE);
\r
// Fire-and-forget: async execute, result not awaited.
385 getSession(trans).executeAsync(sb.toString());
\r
386 } catch (DriverException | APIException | IOException e) {
\r
387 reportPerhapsReset(trans,e);
\r
388 return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
\r
392 return Result.ok();
\r
\r
// Refresh the shared "info" map from the cache table: read every (name, seg, touched)
// row and keep, per name, an array of the newest touched Date per segment (growing the
// array when a larger seg appears). NOTE(review): some branch/assignment lines are
// elided in this listing; nesting inferred.
396 * @see com.att.dao.aaf.cass.CIDAO#check(com.att.authz.env.AuthzTrans)
\r
399 public Result<Void> check(AuthzTrans trans) {
\r
401 TimeTaken tt = trans.start("Check Table Timestamps",Env.REMOTE);
\r
// Uses the BoundStatement prepared in init(); full-table scan of "cache".
403 rs = getSession(trans).execute(check);
\r
404 } catch (DriverException | APIException | IOException e) {
\r
405 reportPerhapsReset(trans,e);
\r
406 return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
\r
// Rows are processed grouped by name; lastName/dates carry state between iterations.
411 String lastName = null;
\r
412 Date[] dates = null;
\r
413 for(Row row : rs.all()) {
\r
414 String name = row.getString(0);
\r
415 int seg = row.getInt(1);
\r
416 if(!name.equals(lastName)) {
\r
417 dates = info.get(name);
\r
// First sighting of this name: allocate an array big enough for this segment.
421 dates=new Date[seg+1];
\r
422 info.put(name,dates);
\r
// Existing array too small: grow, preserving prior entries.
423 } else if(dates.length<=seg) {
\r
424 Date[] temp = new Date[seg+1];
\r
425 System.arraycopy(dates, 0, temp, 0, dates.length);
\r
427 info.put(name, dates);
\r
// Keep only the newest touched timestamp per (name, seg).
429 Date temp = row.getDate(2);
\r
430 if(dates[seg]==null || dates[seg].before(temp)) {
\r
434 return Result.ok();
\r
\r
// Return the cached touched-Date for (table, seg), lazily initializing: unknown tables
// are touch()ed and allocated; too-small arrays are grown; a null slot defaults to this
// DAO's startTime. NOTE(review): several lines (null-check, map put, return) are elided
// in this listing; nesting inferred.
438 * @see com.att.dao.aaf.cass.CIDAO#get(java.lang.String, int)
\r
441 public Date get(AuthzTrans trans, String table, int seg) {
\r
442 Date[] dates = info.get(table);
\r
// Table not tracked yet: allocate and record an initial touch in Cassandra.
444 dates = new Date[seg+1];
\r
445 touch(trans,table, seg);
\r
// Known table but segment beyond current array: grow, preserving prior entries.
446 } else if(dates.length<=seg) {
\r
447 Date[] temp = new Date[seg+1];
\r
448 System.arraycopy(dates, 0, temp, 0, dates.length);
\r
451 Date rv = dates[seg];
\r
// No timestamp yet for this segment: fall back to DAO construction time.
453 rv=dates[seg]=startTime;
\r
\r
459 protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
\r