1 /*******************************************************************************
\r
2 * ============LICENSE_START====================================================
\r
4 * * ===========================================================================
\r
5 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
\r
6 * * ===========================================================================
\r
7 * * Licensed under the Apache License, Version 2.0 (the "License");
\r
8 * * you may not use this file except in compliance with the License.
\r
9 * * You may obtain a copy of the License at
\r
11 * * http://www.apache.org/licenses/LICENSE-2.0
\r
13 * * Unless required by applicable law or agreed to in writing, software
\r
14 * * distributed under the License is distributed on an "AS IS" BASIS,
\r
15 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
\r
16 * * See the License for the specific language governing permissions and
\r
17 * * limitations under the License.
\r
18 * * ============LICENSE_END====================================================
\r
20 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
\r
22 ******************************************************************************/
\r
23 package org.onap.aaf.dao.aaf.cass;
\r
25 import java.io.ByteArrayOutputStream;
\r
26 import java.io.DataInputStream;
\r
27 import java.io.DataOutputStream;
\r
28 import java.io.IOException;
\r
29 import java.nio.ByteBuffer;
\r
30 import java.util.Date;
\r
31 import java.util.List;
\r
33 import org.onap.aaf.authz.env.AuthzTrans;
\r
34 import org.onap.aaf.authz.layer.Result;
\r
35 import org.onap.aaf.dao.Bytification;
\r
36 import org.onap.aaf.dao.Cached;
\r
37 import org.onap.aaf.dao.CassDAOImpl;
\r
38 import org.onap.aaf.dao.DAOException;
\r
39 import org.onap.aaf.dao.Loader;
\r
40 import org.onap.aaf.dao.Streamer;
\r
41 import org.onap.aaf.dao.aaf.hl.Question;
\r
43 import org.onap.aaf.inno.env.APIException;
\r
44 import org.onap.aaf.inno.env.Slot;
\r
45 import org.onap.aaf.inno.env.util.Chrono;
\r
46 import com.datastax.driver.core.Cluster;
\r
47 import com.datastax.driver.core.Row;
\r
/**
 * DAO for the "user_role" table: the association of a user to a role,
 * with an expiration date. Caches by segment; logs modifications to History.
 */
public class UserRoleDAO extends CassDAOImpl<AuthzTrans,UserRoleDAO.Data> {
	public static final String TABLE = "user_role";

	// Number of cache segments keys are hashed into
	public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F

	// Transaction slot name used to stash the current UserRole on the AuthzTrans
	private static final String TRANS_UR_SLOT = "_TRANS_UR_SLOT_";
	public Slot transURSlot;

	// History logging and cache-invalidation companions (owned or shared per constructor)
	private final HistoryDAO historyDAO;
	private final CacheInfoDAO infoDAO;

	// Prepared-statement read paths; built in init(trans)
	private PSInfo psByUser, psByRole, psUserInRole;
\r
64 public UserRoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
\r
65 super(trans, UserRoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
\r
66 transURSlot = trans.slot(TRANS_UR_SLOT);
\r
70 historyDAO = new HistoryDAO(trans, this);
\r
71 infoDAO = new CacheInfoDAO(trans,this);
\r
74 public UserRoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
\r
75 super(trans, UserRoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
\r
76 transURSlot = trans.slot(TRANS_UR_SLOT);
\r
82 private static final int KEYLIMIT = 2;
\r
83 public static class Data extends CacheableData implements Bytification {
\r
87 public String rname;
\r
88 public Date expires;
\r
91 public int[] invalidate(Cached<?,?> cache) {
\r
92 // Note: I'm not worried about Name collisions, because the formats are different:
\r
93 // myName ... etc versus
\r
95 // The "dot" makes the difference.
\r
97 seg(cache,user,role),
\r
104 public ByteBuffer bytify() throws IOException {
\r
105 ByteArrayOutputStream baos = new ByteArrayOutputStream();
\r
106 URLoader.deflt.marshal(this,new DataOutputStream(baos));
\r
107 return ByteBuffer.wrap(baos.toByteArray());
\r
111 public void reconstitute(ByteBuffer bb) throws IOException {
\r
112 URLoader.deflt.unmarshal(this, toDIS(bb));
\r
115 public void role(String ns, String rname) {
\r
117 this.rname = rname;
\r
118 this.role = ns + '.' + rname;
\r
121 public void role(RoleDAO.Data rdd) {
\r
124 role = rdd.fullName();
\r
128 public boolean role(AuthzTrans trans, Question ques, String role) {
\r
130 Result<NsSplit> rnss = ques.deriveNsSplit(trans, role);
\r
131 if(rnss.isOKhasData()) {
\r
132 ns = rnss.value.ns;
\r
133 rname = rnss.value.name;
\r
141 public String toString() {
\r
142 return user + '|' + ns + '|' + rname + '|' + Chrono.dateStamp(expires);
\r
148 private static class URLoader extends Loader<Data> implements Streamer<Data> {
\r
149 public static final int MAGIC=738469903;
\r
150 public static final int VERSION=1;
\r
151 public static final int BUFF_SIZE=48;
\r
153 public static final URLoader deflt = new URLoader(KEYLIMIT);
\r
155 public URLoader(int keylimit) {
\r
160 public Data load(Data data, Row row) {
\r
161 data.user = row.getString(0);
\r
162 data.role = row.getString(1);
\r
163 data.ns = row.getString(2);
\r
164 data.rname = row.getString(3);
\r
165 data.expires = row.getDate(4);
\r
170 protected void key(Data data, int _idx, Object[] obj) {
\r
172 obj[idx]=data.user;
\r
173 obj[++idx]=data.role;
\r
177 protected void body(Data data, int _idx, Object[] obj) {
\r
180 obj[++idx]=data.rname;
\r
181 obj[++idx]=data.expires;
\r
185 public void marshal(Data data, DataOutputStream os) throws IOException {
\r
186 writeHeader(os,MAGIC,VERSION);
\r
188 writeString(os, data.user);
\r
189 writeString(os, data.role);
\r
190 writeString(os, data.ns);
\r
191 writeString(os, data.rname);
\r
192 os.writeLong(data.expires==null?-1:data.expires.getTime());
\r
196 public void unmarshal(Data data, DataInputStream is) throws IOException {
\r
197 /*int version = */readHeader(is,MAGIC,VERSION);
\r
198 // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
\r
200 byte[] buff = new byte[BUFF_SIZE];
\r
201 data.user = readString(is,buff);
\r
202 data.role = readString(is,buff);
\r
203 data.ns = readString(is,buff);
\r
204 data.rname = readString(is,buff);
\r
205 long l = is.readLong();
\r
206 data.expires = l<0?null:new Date(l);
\r
211 private void init(AuthzTrans trans) {
\r
212 String[] helper = setCRUD(trans, TABLE, Data.class, URLoader.deflt);
\r
214 psByUser = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ?",
\r
217 protected void key(Data data, int idx, Object[] obj) {
\r
218 obj[idx]=data.user;
\r
220 },readConsistency);
\r
222 // Note: We understand this call may have poor performance, so only should be used in Management (Delete) func
\r
223 psByRole = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE role = ? ALLOW FILTERING",
\r
226 protected void key(Data data, int idx, Object[] obj) {
\r
227 obj[idx]=data.role;
\r
229 },readConsistency);
\r
231 psUserInRole = new PSInfo(trans,SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ? AND role = ?",
\r
232 URLoader.deflt,readConsistency);
\r
235 public Result<List<Data>> readByUser(AuthzTrans trans, String user) {
\r
236 return psByUser.read(trans, R_TEXT + " by User " + user, new Object[]{user});
\r
240 * Note: Use Sparingly. Cassandra's forced key structure means this will perform fairly poorly
\r
244 * @throws DAOException
\r
246 public Result<List<Data>> readByRole(AuthzTrans trans, String role) {
\r
247 return psByRole.read(trans, R_TEXT + " by Role " + role, new Object[]{role});
\r
251 * Direct Lookup of User Role
\r
252 * Don't forget to check for Expiration
\r
254 public Result<List<Data>> readByUserRole(AuthzTrans trans, String user, String role) {
\r
255 return psUserInRole.read(trans, R_TEXT + " by User " + user + " and Role " + role, new Object[]{user,role});
\r
260 * Log Modification statements to History
\r
261 * @param modified which CRUD action was done
\r
262 * @param data entity data that needs a log entry
\r
263 * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
\r
266 protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
\r
267 boolean memo = override.length>0 && override[0]!=null;
\r
268 boolean subject = override.length>1 && override[1]!=null;
\r
270 HistoryDAO.Data hd = HistoryDAO.newInitedData();
\r
271 HistoryDAO.Data hdRole = HistoryDAO.newInitedData();
\r
273 hd.user = hdRole.user = trans.user();
\r
274 hd.action = modified.name();
\r
275 // Modifying User/Role is an Update to Role, not a Create. JG, 07-14-2015
\r
276 hdRole.action = CRUD.update.name();
\r
278 hdRole.target = RoleDAO.TABLE;
\r
279 hd.subject = subject?override[1] : (data.user + '|'+data.role);
\r
280 hdRole.subject = data.role;
\r
283 hd.memo = hdRole.memo = memo
\r
284 ? String.format("%s by %s", override[0], hd.user)
\r
285 : String.format("%s added to %s",data.user,data.role);
\r
288 hd.memo = hdRole.memo = memo
\r
289 ? String.format("%s by %s", override[0], hd.user)
\r
290 : String.format("%s - %s was updated",data.user,data.role);
\r
293 hd.memo = hdRole.memo = memo
\r
294 ? String.format("%s by %s", override[0], hd.user)
\r
295 : String.format("%s removed from %s",data.user,data.role);
\r
297 hd.reconstruct = hdRole.reconstruct = data.bytify();
\r
298 } catch (IOException e) {
\r
299 trans.warn().log(e,"Deleted UserRole could not be serialized");
\r
303 hd.memo = hdRole.memo = memo
\r
304 ? String.format("%s by %s", override[0], hd.user)
\r
308 if(historyDAO.create(trans, hd).status!=Status.OK) {
\r
309 trans.error().log("Cannot log to History");
\r
312 if(historyDAO.create(trans, hdRole).status!=Status.OK) {
\r
313 trans.error().log("Cannot log to History");
\r
315 // uses User as Segment
\r
316 if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
\r
317 trans.error().log("Cannot touch CacheInfo");
\r