1 /*******************************************************************************
\r
2 * ============LICENSE_START====================================================
\r
4 * * ===========================================================================
\r
5 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
6 * * Copyright © 2017 Amdocs
\r
7 * * ===========================================================================
\r
8 * * Licensed under the Apache License, Version 2.0 (the "License");
\r
9 * * you may not use this file except in compliance with the License.
\r
10 * * You may obtain a copy of the License at
\r
12 * * http://www.apache.org/licenses/LICENSE-2.0
\r
14 * * Unless required by applicable law or agreed to in writing, software
\r
15 * * distributed under the License is distributed on an "AS IS" BASIS,
\r
16 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
\r
17 * * See the License for the specific language governing permissions and
\r
18 * * limitations under the License.
\r
19 * * ============LICENSE_END====================================================
\r
21 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
\r
23 ******************************************************************************/
\r
24 package com.att.dao.aaf.cass;
\r
26 import java.io.ByteArrayOutputStream;
\r
27 import java.io.DataInputStream;
\r
28 import java.io.DataOutputStream;
\r
29 import java.io.IOException;
\r
30 import java.nio.ByteBuffer;
\r
31 import java.util.Date;
\r
32 import java.util.List;
\r
34 import com.att.authz.env.AuthzTrans;
\r
35 import com.att.authz.layer.Result;
\r
36 import com.att.dao.Bytification;
\r
37 import com.att.dao.CIDAO;
\r
38 import com.att.dao.Cached;
\r
39 import com.att.dao.CassDAOImpl;
\r
40 import com.att.dao.Loader;
\r
41 import com.att.dao.Streamer;
\r
42 import com.att.inno.env.APIException;
\r
43 import com.att.inno.env.util.Chrono;
\r
44 import com.datastax.driver.core.Cluster;
\r
45 import com.datastax.driver.core.Row;
\r
/**
 * CredDAO manages credentials.
 */
51 public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {
\r
52 public static final String TABLE = "cred";
\r
53 public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
\r
54 public static final int RAW = -1;
\r
55 public static final int BASIC_AUTH = 1;
\r
56 public static final int BASIC_AUTH_SHA256 = 2;
\r
57 public static final int CERT_SHA256_RSA =200;
\r
59 private HistoryDAO historyDAO;
\r
60 private CIDAO<AuthzTrans> infoDAO;
\r
61 private PSInfo psNS;
\r
62 private PSInfo psID;
\r
64 public CredDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
\r
65 super(trans, CredDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
\r
69 public CredDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {
\r
70 super(trans, CredDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
\r
76 public static final int KEYLIMIT = 3;
\r
77 public static class Data extends CacheableData implements Bytification {
\r
80 public Integer type;
\r
81 public Date expires;
\r
82 public Integer other;
\r
84 public String notes;
\r
85 public ByteBuffer cred; // this is a blob in cassandra
\r
89 public int[] invalidate(Cached<?,?> cache) {
\r
91 seg(cache,id) // cache is for all entities
\r
96 public ByteBuffer bytify() throws IOException {
\r
97 ByteArrayOutputStream baos = new ByteArrayOutputStream();
\r
98 CredLoader.deflt.marshal(this,new DataOutputStream(baos));
\r
99 return ByteBuffer.wrap(baos.toByteArray());
\r
103 public void reconstitute(ByteBuffer bb) throws IOException {
\r
104 CredLoader.deflt.unmarshal(this, toDIS(bb));
\r
107 public String toString() {
\r
108 return id + ' ' + type + ' ' + Chrono.dateTime(expires);
\r
112 private static class CredLoader extends Loader<Data> implements Streamer<Data>{
\r
113 public static final int MAGIC=153323443;
\r
114 public static final int VERSION=1;
\r
115 public static final int BUFF_SIZE=48; // Note:
\r
117 public static final CredLoader deflt = new CredLoader(KEYLIMIT);
\r
118 public CredLoader(int keylimit) {
\r
123 public Data load(Data data, Row row) {
\r
124 data.id = row.getString(0);
\r
125 data.type = row.getInt(1); // NOTE: in datastax driver, If the int value is NULL, 0 is returned!
\r
126 data.expires = row.getDate(2);
\r
127 data.other = row.getInt(3);
\r
128 data.ns = row.getString(4);
\r
129 data.notes = row.getString(5);
\r
130 data.cred = row.getBytesUnsafe(6);
\r
135 protected void key(Data data, int _idx, Object[] obj) {
\r
138 obj[idx] = data.id;
\r
139 obj[++idx] = data.type;
\r
140 obj[++idx] = data.expires;
\r
144 protected void body(Data data, int idx, Object[] obj) {
\r
146 obj[i=idx] = data.other;
\r
147 obj[++i] = data.ns;
\r
148 obj[++i] = data.notes;
\r
149 obj[++i] = data.cred;
\r
153 public void marshal(Data data, DataOutputStream os) throws IOException {
\r
154 writeHeader(os,MAGIC,VERSION);
\r
155 writeString(os, data.id);
\r
156 os.writeInt(data.type);
\r
157 os.writeLong(data.expires==null?-1:data.expires.getTime());
\r
158 os.writeInt(data.other==null?0:data.other);
\r
159 writeString(os, data.ns);
\r
160 writeString(os, data.notes);
\r
161 if(data.cred==null) {
\r
164 int l = data.cred.limit()-data.cred.position();
\r
166 os.write(data.cred.array(),data.cred.position(),l);
\r
171 public void unmarshal(Data data, DataInputStream is) throws IOException {
\r
172 /*int version = */readHeader(is,MAGIC,VERSION);
\r
173 // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
\r
174 byte[] buff = new byte[BUFF_SIZE];
\r
175 data.id = readString(is,buff);
\r
176 data.type = is.readInt();
\r
178 long l = is.readLong();
\r
179 data.expires = l<0?null:new Date(l);
\r
180 data.other = is.readInt();
\r
181 data.ns = readString(is,buff);
\r
182 data.notes = readString(is,buff);
\r
184 int i = is.readInt();
\r
188 byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads
\r
190 data.cred = ByteBuffer.wrap(bytes);
\r
195 private void init(AuthzTrans trans) throws APIException, IOException {
\r
197 if(historyDAO==null) {
\r
198 historyDAO = new HistoryDAO(trans,this);
\r
200 if(infoDAO==null) {
\r
201 infoDAO = new CacheInfoDAO(trans,this);
\r
205 String[] helpers = setCRUD(trans, TABLE, Data.class, CredLoader.deflt);
\r
207 psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
\r
208 " WHERE ns = ?", CredLoader.deflt,readConsistency);
\r
210 psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
\r
211 " WHERE id = ?", CredLoader.deflt,readConsistency);
\r
214 public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
\r
215 return psNS.read(trans, R_TEXT, new Object[]{ns});
\r
218 public Result<List<Data>> readID(AuthzTrans trans, String id) {
\r
219 return psID.read(trans, R_TEXT, new Object[]{id});
\r
223 * Log Modification statements to History
\r
225 * @param modified which CRUD action was done
\r
226 * @param data entity data that needs a log entry
\r
227 * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
\r
230 protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
\r
231 boolean memo = override.length>0 && override[0]!=null;
\r
232 boolean subject = override.length>1 && override[1]!=null;
\r
234 HistoryDAO.Data hd = HistoryDAO.newInitedData();
\r
235 hd.user = trans.user();
\r
236 hd.action = modified.name();
\r
238 hd.subject = subject?override[1]: data.id;
\r
240 ? String.format("%s by %s", override[0], hd.user)
\r
241 : (modified.name() + "d credential for " + data.id);
\r
243 if(modified==CRUD.delete) {
\r
245 hd.reconstruct = data.bytify();
\r
246 } catch (IOException e) {
\r
247 trans.error().log(e,"Could not serialize CredDAO.Data");
\r
251 if(historyDAO.create(trans, hd).status!=Status.OK) {
\r
252 trans.error().log("Cannot log to History");
\r
254 if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {
\r
255 trans.error().log("Cannot touch Cred");
\r