1 /*******************************************************************************
\r
2 * ============LICENSE_START====================================================
\r
4 * * ===========================================================================
\r
5 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
\r
6 * * ===========================================================================
\r
7 * * Licensed under the Apache License, Version 2.0 (the "License");
\r
8 * * you may not use this file except in compliance with the License.
\r
9 * * You may obtain a copy of the License at
\r
11 * * http://www.apache.org/licenses/LICENSE-2.0
\r
13 * * Unless required by applicable law or agreed to in writing, software
\r
14 * * distributed under the License is distributed on an "AS IS" BASIS,
\r
15 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
\r
16 * * See the License for the specific language governing permissions and
\r
17 * * limitations under the License.
\r
18 * * ============LICENSE_END====================================================
\r
20 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
\r
22 ******************************************************************************/
\r
23 package org.onap.aaf.dao.aaf.cass;
\r
25 import java.nio.ByteBuffer;
\r
26 import java.text.SimpleDateFormat;
\r
27 import java.util.Date;
\r
28 import java.util.List;
\r
29 import java.util.UUID;
\r
31 import org.onap.aaf.authz.env.AuthzTrans;
\r
32 import org.onap.aaf.authz.layer.Result;
\r
33 import org.onap.aaf.dao.AbsCassDAO;
\r
34 import org.onap.aaf.dao.CassDAOImpl;
\r
35 import org.onap.aaf.dao.Loader;
\r
37 import com.datastax.driver.core.Cluster;
\r
38 import com.datastax.driver.core.ConsistencyLevel;
\r
39 import com.datastax.driver.core.ResultSet;
\r
40 import com.datastax.driver.core.Row;
\r
46 * History is a special case, because we don't want Updates or Deletes... Too likely to mess up history.
\r
48 * 9-9-2013 - Found a problem with using "Prepare". You cannot prepare anything with a "now()" in it, as
\r
49 * it is evaluated once during the prepare, and kept. That renders any use of "now()" pointless. Therefore
\r
50 * the Create function needs to be run fresh every time.
\r
52 * Fixed in Cassandra 1.2.6 https://issues.apache.org/jira/browse/CASSANDRA-5616
\r
55 public class HistoryDAO extends CassDAOImpl<AuthzTrans, HistoryDAO.Data> {
\r
// NOTE(review): this chunk is a line-numbered dump with CR artifact lines;
// gaps in the embedded numbering mean some original lines are not visible here.
56 private static final String TABLE = "history";
\r
// WARNING: SimpleDateFormat is NOT thread-safe; this shared static instance is
// used unsynchronized in newInitedData(). Concurrent calls can corrupt the
// formatted yyyyMM value — consider java.time.DateTimeFormatter (thread-safe).
58 public static final SimpleDateFormat monthFormat = new SimpleDateFormat("yyyyMM");
\r
59 // private static final SimpleDateFormat dayTimeFormat = new SimpleDateFormat("ddHHmmss");
\r
// Column-name fragments produced by setCRUD() in init(); indexed via FIELD_COMMAS.
61 private String[] helpers;
\r
// Default row loader; constructed in init() with KEYLIMIT — must match column order.
63 private HistLoader defLoader;
\r
// Prepared-statement handles for the three supported query shapes (built in init()).
65 private AbsCassDAO<AuthzTrans, Data>.PSInfo readByUser, readBySubject, readByYRMN;
\r
// Builds the DAO directly against a Cluster/keyspace.
// The two ConsistencyLevel args (LOCAL_ONE, ANY) are presumably read/write
// consistency respectively — TODO confirm against CassDAOImpl's constructor.
// NOTE(review): the lines after super(...) — presumably init(trans) and the
// closing brace — are missing from this dump.
67 public HistoryDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
\r
68 super(trans, HistoryDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);
\r
// Builds the DAO sharing the session/resources of another AbsCassDAO.
// Same consistency levels as the Cluster constructor (LOCAL_ONE / ANY).
// NOTE(review): the body tail (init call and closing brace) is missing from this dump.
72 public HistoryDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
\r
73 super(trans, HistoryDAO.class.getSimpleName(),aDao,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);
\r
// KEYLIMIT = 1: the primary key is the single UUID id column (see HistLoader).
78 private static final int KEYLIMIT = 1;
\r
// Value object mirroring the "history" table columns. Public mutable fields,
// matching the file's existing DAO style.
// NOTE(review): original lines 80-82 and 86 are missing from this dump — based
// on HistLoader.load() they presumably declared id (UUID), yr_mon (int),
// user (String) and memo (String); confirm against the full file.
79 public static class Data {
\r
83 public String action;
\r
84 public String target;
\r
85 public String subject;
\r
87 // Map<String, String> detail = null;
\r
88 // public Map<String, String> detail() {
\r
89 // if(detail == null) {
\r
90 // detail = new HashMap<String, String>();
\r
// Serialized original object, if any, for reconstructing history entries.
94 public ByteBuffer reconstruct;
\r
// Maps Cassandra rows to/from Data. Column order here is the contract with
// the "Loader must match fields order" note in init().
97 private static class HistLoader extends Loader<Data> {
\r
98 public HistLoader(int keylimit) {
\r
// Populates data from a result row: id(0), yr_mon(1), user(2), action(3),
// target(4), subject(5), memo(6), reconstruct(7).
103 public Data load(Data data, Row row) {
\r
104 data.id = row.getUUID(0);
\r
105 data.yr_mon = row.getInt(1);
\r
106 data.user = row.getString(2);
\r
107 data.action = row.getString(3);
\r
108 data.target = row.getString(4);
\r
109 data.subject = row.getString(5);
\r
110 data.memo = row.getString(6);
\r
111 // data.detail = row.getMap(6, String.class, String.class);
\r
112 data.reconstruct = row.getBytes(7);
\r
// Marshals the key column(s) into obj for bound statements.
// NOTE(review): the body (original line ~118, presumably obj[idx]=data.id)
// is missing from this dump.
117 protected void key(Data data, int idx, Object[] obj) {
\r
// Marshals the non-key columns into obj, starting at _idx.
// NOTE(review): the line declaring the local idx (presumably int idx = _idx;
// original line 123) is missing from this dump — the references below depend on it.
122 protected void body(Data data, int _idx, Object[] obj) {
\r
124 obj[idx]=data.yr_mon;
\r
125 obj[++idx]=data.user;
\r
126 obj[++idx]=data.action;
\r
127 obj[++idx]=data.target;
\r
128 obj[++idx]=data.subject;
\r
129 obj[++idx]=data.memo;
\r
130 // obj[++idx]=data.detail;
\r
131 obj[++idx]=data.reconstruct;
\r
// Wires up CRUD statements. History is append-only: update/delete are disabled,
// and create is a non-key-bound INSERT whose id comes from server-side now().
135 private void init(AuthzTrans trans) {
\r
136 // Loader must match fields order
\r
137 defLoader = new HistLoader(KEYLIMIT);
\r
138 helpers = setCRUD(trans, TABLE, Data.class, defLoader);
\r
140 // Need a specialty Creator to handle the "now()"
\r
141 // 9/9/2013 - jg - Just great... now() is evaluated once on Client side, invalidating usage (what point is a now() from a long time in the past?
\r
142 // Unless this is fixed, we're putting in non-prepared statement
\r
143 // Solved in Cassandra. Make sure you are running 1.2.6 Cassandra or later. https://issues.apache.org/jira/browse/CASSANDRA-5616
\r
// Create: id generated via now(); keylimit 0 loader so no key is bound client-side.
144 replace(CRUD.create, new PSInfo(trans, "INSERT INTO history (" + helpers[FIELD_COMMAS] +
\r
145 ") VALUES(now(),?,?,?,?,?,?,?)",
\r
146 new HistLoader(0) {
\r
// Intentionally binds no key columns — see keylimit 0 above.
148 protected void key(Data data, int idx, Object[] obj) {
\r
150 },writeConsistency)
\r
152 // disable(CRUD.Create);
\r
154 replace(CRUD.read, new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
\r
155 " FROM history WHERE id = ?", defLoader,readConsistency)
\r
156 // new HistLoader(2) {
\r
158 // protected void key(Data data, int idx, Object[] obj) {
\r
159 // obj[idx]=data.yr_mon;
\r
160 // obj[++idx]=data.id;
\r
// History must never be rewritten — updates and deletes are hard-disabled.
164 disable(CRUD.update);
\r
165 disable(CRUD.delete);
\r
167 readByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
\r
168 " FROM history WHERE user = ?", defLoader,readConsistency);
\r
// NOTE(review): ALLOW FILTERING scans; acceptable only if subject/target
// queries are rare or the partition is small — confirm data model.
169 readBySubject = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
\r
170 " FROM history WHERE subject = ? and target = ? ALLOW FILTERING", defLoader,readConsistency);
\r
171 readByYRMN = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
\r
172 " FROM history WHERE yr_mon = ?", defLoader,readConsistency);
\r
173 async(true); //TODO dropping messages with Async
\r
// Factory: new Data stamped with the current yyyyMM bucket.
// WARNING: monthFormat is a shared static SimpleDateFormat used here without
// synchronization — not thread-safe; concurrent calls can yield garbage yr_mon.
// NOTE(review): the return statement and closing brace (original lines 181-182)
// are missing from this dump.
176 public static Data newInitedData() {
\r
177 Data data = new Data();
\r
178 Date now = new Date();
\r
179 data.yr_mon = Integer.parseInt(monthFormat.format(now));
\r
180 // data.day_time = Integer.parseInt(dayTimeFormat.format(now));
\r
// Reads all history rows in the given yyyyMM month bucket.
// NOTE(review): the error-check condition guarding the Result.err return
// (original line 186, presumably if(rs.notOK())) is missing from this dump.
184 public Result<List<Data>> readByYYYYMM(AuthzTrans trans, int yyyymm) {
\r
185 Result<ResultSet> rs = readByYRMN.exec(trans, "yr_mon", yyyymm);
\r
187 return Result.err(rs);
\r
// dflt: no extra row filtering — the month constraint was applied server-side.
189 return extract(defLoader,rs.value,null,dflt);
\r
193 * Gets the history for a user in the specified year and month
\r
194 * year - the year in yyyy format
\r
195 * month - the month in a year ...values 1 - 12
\r
// Reads rows by user, then filters client-side to the given yyyymm buckets.
// NOTE(review): conditions guarding the two returns (original lines 198/202)
// are partially missing from this dump.
197 public Result<List<Data>> readByUser(AuthzTrans trans, String user, int ... yyyymm) {
\r
198 if(yyyymm.length==0) {
\r
199 return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
\r
201 Result<ResultSet> rs = readByUser.exec(trans, "user", user);
\r
203 return Result.err(rs);
\r
// NOTE(review): yyyymm.length>0 is always true here (length==0 already
// returned above), so the dflt branch of this ternary is dead code.
205 return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
\r
// Reads rows by subject+target (ALLOW FILTERING query — see init()),
// then filters client-side to the given yyyymm buckets.
208 public Result<List<Data>> readBySubject(AuthzTrans trans, String subject, String target, int ... yyyymm) {
\r
209 if(yyyymm.length==0) {
\r
210 return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
\r
// exec binds both ?s: subject then target.
212 Result<ResultSet> rs = readBySubject.exec(trans, "subject", subject, target);
\r
214 return Result.err(rs);
\r
// NOTE(review): as in readByUser, yyyymm.length>0 is always true here —
// the dflt branch is dead code.
216 return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
\r
219 private class YYYYMM implements Accept<Data> {
\r
220 private int[] yyyymm;
\r
221 public YYYYMM(int yyyymm[]) {
\r
222 this.yyyymm = yyyymm;
\r
225 public boolean ok(Data data) {
\r
226 int dym = data.yr_mon;
\r
227 for(int ym:yyyymm) {
\r