1 /*******************************************************************************
\r
2 * ============LICENSE_START====================================================
\r
4 * * ===========================================================================
\r
5 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
\r
6 * * ===========================================================================
\r
7 * * Licensed under the Apache License, Version 2.0 (the "License");
\r
8 * * you may not use this file except in compliance with the License.
\r
9 * * You may obtain a copy of the License at
\r
11 * * http://www.apache.org/licenses/LICENSE-2.0
\r
13 * * Unless required by applicable law or agreed to in writing, software
\r
14 * * distributed under the License is distributed on an "AS IS" BASIS,
\r
15 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
\r
16 * * See the License for the specific language governing permissions and
\r
17 * * limitations under the License.
\r
18 * * ============LICENSE_END====================================================
\r
20 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
\r
22 ******************************************************************************/
\r
23 package com.att.dao.aaf.cass;
\r
25 import java.io.ByteArrayOutputStream;
\r
26 import java.io.DataInputStream;
\r
27 import java.io.DataOutputStream;
\r
28 import java.io.IOException;
\r
29 import java.nio.ByteBuffer;
\r
30 import java.util.HashSet;
\r
31 import java.util.List;
\r
32 import java.util.Set;
\r
34 import com.att.authz.env.AuthzTrans;
\r
35 import com.att.authz.layer.Result;
\r
36 import com.att.dao.Bytification;
\r
37 import com.att.dao.Cached;
\r
38 import com.att.dao.CassAccess;
\r
39 import com.att.dao.CassDAOImpl;
\r
40 import com.att.dao.Loader;
\r
41 import com.att.dao.Streamer;
\r
42 import com.att.dao.aaf.hl.Question;
\r
43 import com.att.inno.env.APIException;
\r
44 import com.att.inno.env.util.Split;
\r
45 import com.datastax.driver.core.Cluster;
\r
46 import com.datastax.driver.core.Row;
\r
47 import com.datastax.driver.core.exceptions.DriverException;
\r
// NOTE(review): this file is a whitespace-mangled extraction — every line carries a stray
// original-line-number prefix, alternating literal "\r" lines were injected, and gaps in the
// embedded numbering (e.g. 60->62, 68->75) show real source lines were DROPPED. Code below is
// kept byte-identical; comments only annotate what is visible. Restore from upstream VCS before
// attempting any functional change.
// DAO for the Cassandra "role" table. Collaborates with HistoryDAO (audit log) and
// CacheInfoDAO (cache invalidation); see wasModified() near the end of the class.
49 public class RoleDAO extends CassDAOImpl<AuthzTrans,RoleDAO.Data> {
\r
51 public static final String TABLE = "role";
\r
52 public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
\r
// Audit + cache-invalidation collaborators; assigned in the first constructor below.
54 private final HistoryDAO historyDAO;
\r
55 private final CacheInfoDAO infoDAO;
\r
// Prepared-statement helpers built in init(): children-by-name-prefix, by-ns, by-name.
57 private PSInfo psChildren, psNS, psName;
\r
// Standalone constructor: creates its own HistoryDAO/CacheInfoDAO against the given cluster.
59 public RoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
\r
60 super(trans, RoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
\r
// NOTE(review): original line 61 is missing here (numbering jumps 60->62) — possibly an
// init(trans) call or brace; confirm against upstream before editing.
62 historyDAO = new HistoryDAO(trans, this);
\r
63 infoDAO = new CacheInfoDAO(trans,this);
\r
// Sharing constructor: reuses the given DAOs' session. NOTE(review): original lines 64-66 and
// 69-74 are missing — they presumably closed the first constructor and assigned
// historyDAO/infoDAO from hDAO/ciDAO here; this ctor's visible body ends mid-definition.
67 public RoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
\r
68 super(trans, RoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
\r
75 //////////////////////////////////////////
\r
76 // Data Definition, matches Cassandra DM
\r
77 //////////////////////////////////////////
\r
// Number of primary-key columns (ns, name) — used by RoleLoader below.
78 private static final int KEYLIMIT = 2;
\r
80 * Data class that matches the Cassandra Table "role"
\r
// Row object for the "role" table; Bytification allows binary (de)serialization via RoleLoader.
// NOTE(review): original lines 83-84 are missing — fullName()/encode()/toString() below read
// fields `ns` and `name`, so those public String declarations were presumably dropped here.
82 public static class Data extends CacheableData implements Bytification {
\r
85 public Set<String> perms;
\r
86 public String description;
\r
88 ////////////////////////////////////////
\r
// Lazily creates the perm set; when mutable, ensures the caller gets a HashSet it may modify
// (copies any non-HashSet implementation). NOTE(review): closing lines of this method missing.
90 public Set<String> perms(boolean mutable) {
\r
91 if (perms == null) {
\r
92 perms = new HashSet<String>();
\r
93 } else if (mutable && !(perms instanceof HashSet)) {
\r
94 perms = new HashSet<String>(perms);
\r
// Factory from an NsDAO row + role name. NOTE(review): lines 102-106 missing — presumably
// rv.ns/rv.name assignments from nss and the return; confirm upstream.
99 public static Data create(NsDAO.Data ns, String name) {
\r
100 NsSplit nss = new NsSplit(ns,name);
\r
101 RoleDAO.Data rv = new Data();
\r
// "ns.name" display form.
107 public String fullName() {
\r
108 return ns + '.' + name;
\r
// "ns|name" storage encoding — the inverse of decode() below.
111 public String encode() {
\r
112 return ns + '|' + name;
\r
116 * Decode Perm String, including breaking into appropriate Namespace
\r
// Parses "ns|name"; a single-part legacy value is split via Question.deriveNsSplit.
// NOTE(review): line 128 (presumably the nss.notOK() guard) and 134-136 (else-branch
// assignments for the 2-part form) are missing.
123 public static Result<Data> decode(AuthzTrans trans, Question q, String r) {
\r
124 String[] ss = Split.splitTrim('|', r,2);
\r
125 Data data = new Data();
\r
126 if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
\r
127 Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
\r
129 return Result.err(nss);
\r
131 data.ns=nss.value.ns;
\r
132 data.name=nss.value.name;
\r
133 } else { // new 4 part encoding
\r
137 return Result.ok(data);
\r
141 * Decode from UserRole Data
\r
// Builds a partial Data (name from urdd.rname; line 147, presumably rd.ns = urdd.ns, missing).
145 public static RoleDAO.Data decode(UserRoleDAO.Data urdd) {
\r
146 RoleDAO.Data rd = new RoleDAO.Data();
\r
148 rd.name = urdd.rname;
\r
154 * Decode Perm String, including breaking into appropriate Namespace
\r
// Same parse as decode(), but returns the raw {ns, name} pair instead of a Data object.
// NOTE(review): lines 165 and 167 (guard + brace around the err return) are missing.
161 public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {
\r
162 String[] ss = Split.splitTrim('|', p,2);
\r
163 if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
\r
164 Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
\r
166 return Result.err(nss);
\r
168 ss[0] = nss.value.ns;
\r
169 ss[1] = nss.value.name;
\r
171 return Result.ok(ss);
\r
// Cache segments to invalidate for this row. NOTE(review): the surrounding return-array
// construction (lines 176, 178-183) is missing — only one seg(...) element is visible.
175 public int[] invalidate(Cached<?,?> cache) {
\r
177 seg(cache,ns,name),
\r
// Serialize via RoleLoader.deflt.marshal into a heap ByteBuffer (Bytification contract).
184 public ByteBuffer bytify() throws IOException {
\r
185 ByteArrayOutputStream baos = new ByteArrayOutputStream();
\r
186 RoleLoader.deflt.marshal(this,new DataOutputStream(baos));
\r
187 return ByteBuffer.wrap(baos.toByteArray());
\r
// Inverse of bytify(): repopulate this instance from the buffer (toDIS from CassDAOImpl).
191 public void reconstitute(ByteBuffer bb) throws IOException {
\r
192 RoleLoader.deflt.unmarshal(this, toDIS(bb));
\r
// Same display form as fullName().
196 public String toString() {
\r
197 return ns + '.' + name;
\r
// Maps Cassandra Rows and binary streams to/from Data. Column order is fixed:
// 0=ns, 1=name, 2=perms (set<text>), 3=description. The binary format written by marshal()
// is header(MAGIC,VERSION) then ns, name, perms, description — unmarshal() must mirror it.
201 private static class RoleLoader extends Loader<Data> implements Streamer<Data> {
\r
202 public static final int MAGIC=923577343;
\r
203 public static final int VERSION=1;
\r
// Scratch-buffer size for readString during unmarshal.
204 public static final int BUFF_SIZE=96;
\r
206 public static final RoleLoader deflt = new RoleLoader(KEYLIMIT);
\r
// NOTE(review): constructor body (lines 209-211, presumably super(keylimit)) is missing.
208 public RoleLoader(int keylimit) {
\r
213 public Data load(Data data, Row row) {
\r
214 // Int more efficient
\r
215 data.ns = row.getString(0);
\r
216 data.name = row.getString(1);
\r
217 data.perms = row.getSet(2,String.class);
\r
218 data.description = row.getString(3);
\r
// Fills bound-statement key args starting at _idx. NOTE(review): line 224-225 missing —
// `idx` is used below without a visible `int idx=_idx;` declaration or the obj[idx]=data.ns slot.
223 protected void key(Data data, int _idx, Object[] obj) {
\r
226 obj[++idx]=data.name;
\r
// Fills non-key args (perms, description). NOTE(review): line 231 (`int idx=_idx;`) missing.
230 protected void body(Data data, int _idx, Object[] obj) {
\r
232 obj[idx]=data.perms;
\r
233 obj[++idx]=data.description;
\r
237 public void marshal(Data data, DataOutputStream os) throws IOException {
\r
238 writeHeader(os,MAGIC,VERSION);
\r
239 writeString(os, data.ns);
\r
240 writeString(os, data.name);
\r
241 writeStringSet(os,data.perms);
\r
242 writeString(os, data.description);
\r
246 public void unmarshal(Data data, DataInputStream is) throws IOException {
\r
247 /*int version = */readHeader(is,MAGIC,VERSION);
\r
248 // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
\r
249 byte[] buff = new byte[BUFF_SIZE];
\r
250 data.ns = readString(is, buff);
\r
251 data.name = readString(is,buff);
\r
252 data.perms = readStringSet(is,buff);
\r
253 data.description = readString(is,buff);
\r
// Builds the three SELECT prepared statements after registering standard CRUD via setCRUD.
// psChildren uses a range on `name` between "role." and "role." + next char to fetch children
// of a role prefix; its anonymous loader overrides key() to append DOT / DOT_PLUS_ONE.
// NOTE(review): lines 259, 262, 265, 269, 271, 275, 277+ (blanks/braces/`int idx=_idx;`)
// are missing from this extraction.
257 private void init(AuthzTrans trans) {
\r
258 String[] helpers = setCRUD(trans, TABLE, Data.class, RoleLoader.deflt);
\r
260 psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
\r
261 " WHERE ns = ?", new RoleLoader(1),readConsistency);
\r
// NOTE(review): filtering on `name` alone — presumably backed by a secondary index in the
// schema; cannot confirm from this file.
263 psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
\r
264 " WHERE name = ?", new RoleLoader(1),readConsistency);
\r
266 psChildren = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
\r
267 " WHERE ns=? AND name > ? AND name < ?",
\r
268 new RoleLoader(3) {
\r
270 protected void key(Data data, int _idx, Object[] obj) {
\r
272 obj[idx] = data.ns;
\r
273 obj[++idx]=data.name + DOT;
\r
274 obj[++idx]=data.name + DOT_PLUS_ONE;
\r
276 },readConsistency);
\r
// Fetch all roles in a namespace.
280 public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
\r
281 return psNS.read(trans, R_TEXT + " NS " + ns, new Object[]{ns});
\r
// Fetch roles by bare name (across namespaces).
284 public Result<List<Data>> readName(AuthzTrans trans, String name) {
\r
285 return psName.read(trans, R_TEXT + name, new Object[]{name});
\r
// Fetch child roles of `role` within `ns`; empty or "*" means every role in the namespace
// (FIRST_CHAR/LAST_CHAR bound the whole name range). NOTE(review): the closing brace of the
// if-branch (line 291) and of the method are missing from this extraction.
288 public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String role) {
\r
289 if(role.length()==0 || "*".equals(role)) {
\r
290 return psChildren.read(trans, R_TEXT, new Object[]{ns, FIRST_CHAR, LAST_CHAR});
\r
292 return psChildren.read(trans, R_TEXT, new Object[]{ns, role+DOT, role+DOT_PLUS_ONE});
\r
297 * Add a single Permission to the Role's Permission Collection
\r
// Appends perm.encode() to the role row's `perms` set via raw CQL, then writes history.
// NOTE(review): line 309 (the `try {` opener matched by the catch below) is missing.
// NOTE(review): pencode/role.ns/role.name are concatenated directly into the CQL string —
// a value containing a single quote would break or alter the statement; flagging for review
// (the comment below explains why a prepared statement was not used).
306 public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {
\r
307 // Note: Prepared Statements for Collection updates aren't supported
\r
308 String pencode = perm.encode();
\r
310 getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms + {'" +
\r
311 pencode + "'} WHERE " +
\r
312 "ns = '" + role.ns + "' AND name = '" + role.name + "';");
\r
313 } catch (DriverException | APIException | IOException e) {
\r
// Driver failure: report (possibly resetting the session) and surface a backend error.
314 reportPerhapsReset(trans,e);
\r
315 return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
\r
// Audit the successful update (lines 316-317, presumably the closing brace/blank, missing).
318 wasModified(trans, CRUD.update, role, "Added permission " + pencode + " to role " + role.fullName());
\r
319 return Result.ok();
\r
323 * Remove a single Permission from the Role's Permission Collection
\r
// Mirror of addPerm: removes perm.encode() from the row's `perms` set via raw CQL.
// NOTE(review): lines 333 and 335-337 (blank + `try {` opener) are missing; same
// quote-escaping concern as addPerm for pencode/role.ns/role.name.
331 public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {
\r
332 // Note: Prepared Statements for Collection updates aren't supported
\r
334 String pencode = perm.encode();
\r
338 getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms - {'" +
\r
339 pencode + "'} WHERE " +
\r
340 "ns = '" + role.ns + "' AND name = '" + role.name + "';");
\r
341 } catch (DriverException | APIException | IOException e) {
\r
342 reportPerhapsReset(trans,e);
\r
343 return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
\r
// Cassandra set-removal succeeds even when the element was absent, hence the TODO below.
346 //TODO how can we tell when it doesn't?
\r
347 wasModified(trans, CRUD.update, role, "Removed permission " + pencode + " from role " + role.fullName() );
\r
348 return Result.ok();
\r
352 * Add description to role
\r
357 * @param description
\r
// Overwrites the row's description via raw CQL, then audits. NOTE(review): line 361
// (the `try {` opener matched below) and lines 370-371 (presumably data.ns/data.name
// assignments before fullName() is used) are missing.
// NOTE(review): `description` is caller-supplied and concatenated unescaped into the CQL —
// a single quote in it breaks/alters the statement; flagging for review.
360 public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {
\r
362 getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '"
\r
363 + description + "' WHERE ns = '" + ns + "' AND name = '" + name + "';");
\r
364 } catch (DriverException | APIException | IOException e) {
\r
365 reportPerhapsReset(trans,e);
\r
366 return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
\r
369 Data data = new Data();
\r
372 wasModified(trans, CRUD.update, data, "Added description " + description + " to role " + data.fullName(), null );
\r
373 return Result.ok();
\r
378 * Log Modification statements to History
\r
379 * @param modified which CRUD action was done
\r
380 * @param data entity data that needs a log entry
\r
381 * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
\r
// override[0] = custom memo, override[1] = custom subject; either may be null to use defaults.
// On delete, the serialized row is stored in hd.reconstruct so it can be restored later.
// Failures to write history or touch the cache are logged, not propagated.
// NOTE(review): lines 391 (presumably hd.target=TABLE), 395 (`try {` before bytify), and the
// method/class tail beyond 406 are missing from this extraction; `cache` at line 405 is not
// declared in any visible line — presumably an inherited CassDAOImpl field. Confirm upstream.
384 protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
\r
385 boolean memo = override.length>0 && override[0]!=null;
\r
386 boolean subject = override.length>1 && override[1]!=null;
\r
388 HistoryDAO.Data hd = HistoryDAO.newInitedData();
\r
389 hd.user = trans.user();
\r
390 hd.action = modified.name();
\r
392 hd.subject = subject ? override[1] : data.fullName();
\r
393 hd.memo = memo ? override[0] : (data.fullName() + " was " + modified.name() + 'd' );
\r
394 if(modified==CRUD.delete) {
\r
396 hd.reconstruct = data.bytify();
\r
397 } catch (IOException e) {
\r
// Best-effort: a serialization failure only costs the reconstruct payload, not the log entry.
398 trans.error().log(e,"Could not serialize RoleDAO.Data");
\r
402 if(historyDAO.create(trans, hd).status!=Status.OK) {
\r
403 trans.error().log("Cannot log to History");
\r
405 if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
\r
406 trans.error().log("Cannot touch CacheInfo for Role");