/*******************************************************************************
 * ============LICENSE_START====================================================
 * * ===========================================================================
 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * * ===========================================================================
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 * *      http://www.apache.org/licenses/LICENSE-2.0
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 * * ============LICENSE_END====================================================
 * *
 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 ******************************************************************************/
package org.onap.aaf.dao.aaf.cass;

import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.List;

import org.onap.aaf.authz.env.AuthzTrans;
import org.onap.aaf.authz.layer.Result;
import org.onap.aaf.dao.Bytification;
import org.onap.aaf.dao.CIDAO;
import org.onap.aaf.dao.Cached;
import org.onap.aaf.dao.CassDAOImpl;
import org.onap.aaf.dao.Loader;
import org.onap.aaf.dao.Streamer;
import org.onap.aaf.inno.env.APIException;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Row;

/**
 * CertDAO manages X.509 certificate entries in Cassandra.
 */
public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {
    public static final String TABLE = "x509";
    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F

    private HistoryDAO historyDAO;
    private CIDAO<AuthzTrans> infoDAO;
    private PSInfo psX500, psID;

    public CertDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
        super(trans, CertDAO.class.getSimpleName(), cluster, keyspace, Data.class, TABLE,
                readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
        init(trans);
    }

    public CertDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {
        super(trans, CertDAO.class.getSimpleName(), hDao, Data.class, TABLE,
                readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
        historyDAO = hDao;
        infoDAO = ciDao;
        init(trans);
    }
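
    // Usage sketch (commentary added here, not part of the original class).
    // It assumes an AuthzTrans "trans", a configured Cluster "cluster", and a
    // keyspace name "authz" supplied by the surrounding AAF framework:
    //
    //   CertDAO cdao = new CertDAO(trans, cluster, "authz");
    //   Result<List<CertDAO.Data>> r = cdao.readID(trans, "someone@example.com");
    //   if (r.isOK()) {
    //       for (CertDAO.Data d : r.value) {
    //           trans.info().log(d.id, d.x500);
    //       }
    //   }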

    public static final int KEYLIMIT = 2;

    public static class Data extends CacheableData implements Bytification {
        public String ca;
        public BigInteger serial;
        public String id;
        public String x500;
        public String x509;

        @Override
        public int[] invalidate(Cached<?,?> cache) {
            return new int[] {
                seg(cache, ca, serial)
            };
        }
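
        // Added commentary: seg(...) presumably hashes the key fields (ca, serial)
        // into one of the CACHE_SEG (0x40 = 64) cache segments, so a modification
        // invalidates only the segment that could hold this entry, e.g. a key
        // hashing to 0x123 would land in segment 0x123 % 0x40 = 0x23 (illustrative
        // arithmetic only; the actual mapping lives in Cached/CacheableData).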

        @Override
        public ByteBuffer bytify() throws IOException {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            CertLoader.deflt.marshal(this, new DataOutputStream(baos));
            return ByteBuffer.wrap(baos.toByteArray());
        }

        @Override
        public void reconstitute(ByteBuffer bb) throws IOException {
            CertLoader.deflt.unmarshal(this, toDIS(bb));
        }
    }
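
    // Round-trip sketch (added commentary, not in the original): bytify() and
    // reconstitute() delegate to CertLoader.marshal/unmarshal, so they should be
    // inverses. Sample values below are illustrative only:
    //
    //   CertDAO.Data d = new CertDAO.Data();
    //   d.ca = "local"; d.serial = BigInteger.TEN;
    //   d.id = "me@example.com"; d.x500 = "CN=me"; d.x509 = "...pem...";
    //   ByteBuffer bb = d.bytify();
    //   CertDAO.Data copy = new CertDAO.Data();
    //   copy.reconstitute(bb); // copy now mirrors d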

    private static class CertLoader extends Loader<Data> implements Streamer<Data> {
        public static final int MAGIC = 85102934;
        public static final int VERSION = 1;
        public static final int BUFF_SIZE = 48;

        public static final CertLoader deflt = new CertLoader(KEYLIMIT);

        public CertLoader(int keylimit) {
            super(keylimit);
        }

        @Override
        public Data load(Data data, Row row) {
            data.ca = row.getString(0);
            ByteBuffer bb = row.getBytesUnsafe(1);
            byte[] bytes = new byte[bb.remaining()];
            bb.get(bytes); // copy the serial bytes out of the buffer
            data.serial = new BigInteger(bytes);
            data.id = row.getString(2);
            data.x500 = row.getString(3);
            data.x509 = row.getString(4);
            return data;
        }

        @Override
        protected void key(Data data, int idx, Object[] obj) {
            obj[idx] = data.ca;
            obj[++idx] = ByteBuffer.wrap(data.serial.toByteArray());
        }

        @Override
        protected void body(Data data, int _idx, Object[] obj) {
            int idx = _idx;
            obj[idx] = data.id;
            obj[++idx] = data.x500;
            obj[++idx] = data.x509;
        }
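
        // Added commentary: key() supplies the primary-key values (ca, serial)
        // and body() the remaining columns (id, x500, x509), matching the
        // column order that load() reads back (indices 0-4).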

        @Override
        public void marshal(Data data, DataOutputStream os) throws IOException {
            writeHeader(os, MAGIC, VERSION);
            writeString(os, data.id);
            writeString(os, data.x500);
            writeString(os, data.x509);
            writeString(os, data.ca);
            if (data.serial == null) {
                os.writeInt(-1); // negative sentinel: no serial present
            } else {
                byte[] dsba = data.serial.toByteArray();
                int l = dsba.length;
                os.writeInt(l); // length-prefix the serial bytes
                os.write(dsba, 0, l);
            }
        }

        @Override
        public void unmarshal(Data data, DataInputStream is) throws IOException {
            /*int version = */readHeader(is, MAGIC, VERSION);
            // If VERSION ever changes between production runs, switch on the
            // version returned by readHeader and read each field format accordingly.
            byte[] buff = new byte[BUFF_SIZE];
            data.id = readString(is, buff);
            data.x500 = readString(is, buff);
            data.x509 = readString(is, buff);
            data.ca = readString(is, buff);
            int i = is.readInt();
            if (i < 0) {
                data.serial = null;
            } else {
                byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads
                is.read(bytes);
                data.serial = new BigInteger(bytes);
            }
        }
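
        // Hypothetical sketch (not in the original) of the version switch the
        // comment above alludes to, should VERSION ever be bumped:
        //
        //   int version = readHeader(is, MAGIC, VERSION);
        //   switch (version) {
        //       case 1: /* read fields as above */ break;
        //       default: throw new IOException("Unknown CertLoader version " + version);
        //   }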
    }

    @Override
    public Result<List<CertDAO.Data>> read(AuthzTrans trans, Object ... key) {
        // Translate the BigInteger serial into a byte array for lookup
        return super.read(trans, key[0], ByteBuffer.wrap(((BigInteger)key[1]).toByteArray()));
    }
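
    // Illustrative call (added commentary): the composite key is (ca, serial),
    // with the serial passed as a BigInteger and translated above; "local" is a
    // hypothetical CA name:
    //
    //   Result<List<CertDAO.Data>> r = cdao.read(trans, "local", new BigInteger("12345"));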

    private void init(AuthzTrans trans) throws APIException, IOException {
        // Set up sub-DAOs
        if (historyDAO == null) {
            historyDAO = new HistoryDAO(trans, this);
        }
        if (infoDAO == null) {
            infoDAO = new CacheInfoDAO(trans, this);
        }

        String[] helpers = setCRUD(trans, TABLE, Data.class, CertLoader.deflt);

        psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
                " WHERE id = ?", CertLoader.deflt, readConsistency);

        psX500 = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
                " WHERE x500 = ?", CertLoader.deflt, readConsistency);
    }
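
    // Added commentary: given the column order load() expects, helpers[FIELD_COMMAS]
    // presumably expands to "ca,serial,id,x500,x509", so psID prepares roughly:
    //
    //   SELECT ca,serial,id,x500,x509 FROM x509 WHERE id = ?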

    public Result<List<Data>> readX500(AuthzTrans trans, String x500) {
        return psX500.read(trans, R_TEXT, new Object[]{x500});
    }

    public Result<List<Data>> readID(AuthzTrans trans, String id) {
        return psID.read(trans, R_TEXT, new Object[]{id});
    }

    /**
     * Log modification statements to History.
     *
     * @param modified which CRUD action was done
     * @param data     entity data that needs a log entry
     * @param override if specified, used rather than crafting a history message from the data
     */
    @Override
    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
        boolean memo = override.length > 0 && override[0] != null;
        boolean subject = override.length > 1 && override[1] != null;

        HistoryDAO.Data hd = HistoryDAO.newInitedData();
        hd.user = trans.user();
        hd.action = modified.name();
        hd.target = TABLE;
        hd.subject = subject ? override[1] : data.id;
        hd.memo = memo
                ? String.format("%s by %s", override[0], hd.user)
                : (modified.name() + "d certificate info for " + data.id);
        if (modified == CRUD.delete) {
            try {
                hd.reconstruct = data.bytify();
            } catch (IOException e) {
                trans.error().log(e, "Could not serialize CertDAO.Data");
            }
        }

        if (historyDAO.create(trans, hd).status != Status.OK) {
            trans.error().log("Cannot log to History");
        }
        if (infoDAO.touch(trans, TABLE, data.invalidate(cache)).status != Status.OK) {
            trans.error().log("Cannot touch Cert");
        }
    }
}