1 /*******************************************************************************
\r
2 * ============LICENSE_START====================================================
\r
4 * * ===========================================================================
\r
5 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
\r
6 * * ===========================================================================
\r
7 * * Licensed under the Apache License, Version 2.0 (the "License");
\r
8 * * you may not use this file except in compliance with the License.
\r
9 * * You may obtain a copy of the License at
\r
11 * * http://www.apache.org/licenses/LICENSE-2.0
\r
13 * * Unless required by applicable law or agreed to in writing, software
\r
14 * * distributed under the License is distributed on an "AS IS" BASIS,
\r
15 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
\r
16 * * See the License for the specific language governing permissions and
\r
17 * * limitations under the License.
\r
18 * * ============LICENSE_END====================================================
\r
20 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
\r
22 ******************************************************************************/
\r
23 package com.att.dao.aaf.cass;
\r
25 import java.nio.ByteBuffer;
\r
26 import java.text.SimpleDateFormat;
\r
27 import java.util.Date;
\r
28 import java.util.List;
\r
29 import java.util.UUID;
\r
31 import com.att.authz.env.AuthzTrans;
\r
32 import com.att.authz.layer.Result;
\r
33 import com.att.dao.AbsCassDAO;
\r
34 import com.att.dao.CassDAOImpl;
\r
35 import com.att.dao.Loader;
\r
36 import com.datastax.driver.core.Cluster;
\r
37 import com.datastax.driver.core.ConsistencyLevel;
\r
38 import com.datastax.driver.core.ResultSet;
\r
39 import com.datastax.driver.core.Row;
\r
/**
 * HistoryDAO — append-only access to the "history" table.
 *
 * History is a special case, because we don't want Updates or Deletes... Too likely to mess up history.
 *
 * 9-9-2013 - Found a problem with using "Prepare". You cannot prepare anything with a "now()" in it, as
 * it is evaluated once during the prepare, and kept. That renders any use of "now()" pointless. Therefore
 * the Create function needs to be run fresh every time.
 *
 * Fixed in Cassandra 1.2.6 https://issues.apache.org/jira/browse/CASSANDRA-5616
 */
54 public class HistoryDAO extends CassDAOImpl<AuthzTrans, HistoryDAO.Data> {
\r
// Cassandra table this DAO reads and writes.
private static final String TABLE = "history";

// NOTE(review): SimpleDateFormat is NOT thread-safe; this shared public static
// instance must not be format()-ed concurrently without external synchronization.
// Kept public static for backward compatibility with existing callers.
public static final SimpleDateFormat monthFormat = new SimpleDateFormat("yyyyMM");
// private static final SimpleDateFormat dayTimeFormat = new SimpleDateFormat("ddHHmmss");

// Column-name helper strings produced by setCRUD (e.g. helpers[FIELD_COMMAS]).
private String[] helpers;

// Default row<->Data loader; its field order must match the SELECT column order.
private HistLoader defLoader;

// Prepared-statement accessors for the supported query patterns (built in init()).
private AbsCassDAO<AuthzTrans, Data>.PSInfo readByUser, readBySubject, readByYRMN;
/**
 * Constructs a HistoryDAO with its own session against the given cluster/keyspace.
 *
 * @param trans    transaction context used during statement preparation
 * @param cluster  Cassandra cluster to connect to
 * @param keyspace keyspace containing the history table
 */
public HistoryDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
    super(trans, HistoryDAO.class.getSimpleName(), cluster, keyspace, Data.class, TABLE,
            ConsistencyLevel.LOCAL_ONE, ConsistencyLevel.ANY);
    // Reconstructed from truncation: PSInfos (readByUser etc.) are only built here.
    init(trans);
}
/**
 * Constructs a HistoryDAO sharing the session of an existing DAO.
 *
 * @param trans transaction context used during statement preparation
 * @param aDao  sibling DAO whose cluster/session is reused
 */
public HistoryDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans, ?> aDao) {
    super(trans, HistoryDAO.class.getSimpleName(), aDao, Data.class, TABLE,
            ConsistencyLevel.LOCAL_ONE, ConsistencyLevel.ANY);
    // Reconstructed from truncation: PSInfos (readByUser etc.) are only built here.
    init(trans);
}
77 private static final int KEYLIMIT = 1;
\r
78 public static class Data {
\r
82 public String action;
\r
83 public String target;
\r
84 public String subject;
\r
86 // Map<String, String> detail = null;
\r
87 // public Map<String, String> detail() {
\r
88 // if(detail == null) {
\r
89 // detail = new HashMap<String, String>();
\r
93 public ByteBuffer reconstruct;
\r
96 private static class HistLoader extends Loader<Data> {
\r
97 public HistLoader(int keylimit) {
\r
102 public Data load(Data data, Row row) {
\r
103 data.id = row.getUUID(0);
\r
104 data.yr_mon = row.getInt(1);
\r
105 data.user = row.getString(2);
\r
106 data.action = row.getString(3);
\r
107 data.target = row.getString(4);
\r
108 data.subject = row.getString(5);
\r
109 data.memo = row.getString(6);
\r
110 // data.detail = row.getMap(6, String.class, String.class);
\r
111 data.reconstruct = row.getBytes(7);
\r
116 protected void key(Data data, int idx, Object[] obj) {
\r
121 protected void body(Data data, int _idx, Object[] obj) {
\r
123 obj[idx]=data.yr_mon;
\r
124 obj[++idx]=data.user;
\r
125 obj[++idx]=data.action;
\r
126 obj[++idx]=data.target;
\r
127 obj[++idx]=data.subject;
\r
128 obj[++idx]=data.memo;
\r
129 // obj[++idx]=data.detail;
\r
130 obj[++idx]=data.reconstruct;
\r
134 private void init(AuthzTrans trans) {
\r
135 // Loader must match fields order
\r
136 defLoader = new HistLoader(KEYLIMIT);
\r
137 helpers = setCRUD(trans, TABLE, Data.class, defLoader);
\r
139 // Need a specialty Creator to handle the "now()"
\r
140 // 9/9/2013 - jg - Just great... now() is evaluated once on Client side, invalidating usage (what point is a now() from a long time in the past?
\r
141 // Unless this is fixed, we're putting in non-prepared statement
\r
142 // Solved in Cassandra. Make sure you are running 1.2.6 Cassandra or later. https://issues.apache.org/jira/browse/CASSANDRA-5616
\r
143 replace(CRUD.create, new PSInfo(trans, "INSERT INTO history (" + helpers[FIELD_COMMAS] +
\r
144 ") VALUES(now(),?,?,?,?,?,?,?)",
\r
145 new HistLoader(0) {
\r
147 protected void key(Data data, int idx, Object[] obj) {
\r
149 },writeConsistency)
\r
151 // disable(CRUD.Create);
\r
153 replace(CRUD.read, new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
\r
154 " FROM history WHERE id = ?", defLoader,readConsistency)
\r
155 // new HistLoader(2) {
\r
157 // protected void key(Data data, int idx, Object[] obj) {
\r
158 // obj[idx]=data.yr_mon;
\r
159 // obj[++idx]=data.id;
\r
163 disable(CRUD.update);
\r
164 disable(CRUD.delete);
\r
166 readByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
\r
167 " FROM history WHERE user = ?", defLoader,readConsistency);
\r
168 readBySubject = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
\r
169 " FROM history WHERE subject = ? and target = ? ALLOW FILTERING", defLoader,readConsistency);
\r
170 readByYRMN = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
\r
171 " FROM history WHERE yr_mon = ?", defLoader,readConsistency);
\r
172 async(true); //TODO dropping messages with Async
\r
175 public static Data newInitedData() {
\r
176 Data data = new Data();
\r
177 Date now = new Date();
\r
178 data.yr_mon = Integer.parseInt(monthFormat.format(now));
\r
179 // data.day_time = Integer.parseInt(dayTimeFormat.format(now));
\r
183 public Result<List<Data>> readByYYYYMM(AuthzTrans trans, int yyyymm) {
\r
184 Result<ResultSet> rs = readByYRMN.exec(trans, "yr_mon", yyyymm);
\r
186 return Result.err(rs);
\r
188 return extract(defLoader,rs.value,null,dflt);
\r
192 * Gets the history for a user in the specified year and month
\r
193 * year - the year in yyyy format
\r
194 * month - the month in a year ...values 1 - 12
\r
196 public Result<List<Data>> readByUser(AuthzTrans trans, String user, int ... yyyymm) {
\r
197 if(yyyymm.length==0) {
\r
198 return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
\r
200 Result<ResultSet> rs = readByUser.exec(trans, "user", user);
\r
202 return Result.err(rs);
\r
204 return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
\r
207 public Result<List<Data>> readBySubject(AuthzTrans trans, String subject, String target, int ... yyyymm) {
\r
208 if(yyyymm.length==0) {
\r
209 return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
\r
211 Result<ResultSet> rs = readBySubject.exec(trans, "subject", subject, target);
\r
213 return Result.err(rs);
\r
215 return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
\r
218 private class YYYYMM implements Accept<Data> {
\r
219 private int[] yyyymm;
\r
220 public YYYYMM(int yyyymm[]) {
\r
221 this.yyyymm = yyyymm;
\r
224 public boolean ok(Data data) {
\r
225 int dym = data.yr_mon;
\r
226 for(int ym:yyyymm) {
\r