AT&T 2.0.19 Code drop, stage 5
diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CertDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CertDAO.java
deleted file mode 100644 (file)
index 4ed6a3e..0000000
+++ /dev/null
@@ -1,244 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START====================================================
- * * org.onap.aaf
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * *      http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-package org.onap.aaf.dao.aaf.cass;
-
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.math.BigInteger;
-import java.nio.ByteBuffer;
-import java.util.List;
-
-import org.onap.aaf.authz.env.AuthzTrans;
-import org.onap.aaf.authz.layer.Result;
-import org.onap.aaf.dao.Bytification;
-import org.onap.aaf.dao.CIDAO;
-import org.onap.aaf.dao.Cached;
-import org.onap.aaf.dao.CassDAOImpl;
-import org.onap.aaf.dao.Loader;
-import org.onap.aaf.dao.Streamer;
-import org.onap.aaf.inno.env.APIException;
-
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.Row;
-
-/**
- * CertDAO manages X.509 certificate entries (the "x509" table).
- * Date: 7/19/13
- */
-public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {
-    public static final String TABLE = "x509";
-    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
-
-    private HistoryDAO historyDAO;
-    private CIDAO<AuthzTrans> infoDAO;
-    private PSInfo psX500,psID;
-
-    public CertDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
-        super(trans, CertDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
-        init(trans);
-    }
-
-    public CertDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {
-        super(trans, CertDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
-        historyDAO = hDao;
-        infoDAO = ciDao;
-        init(trans);
-    }
-
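-    // KEYLIMIT: the two key columns (ca, serial) form the lookup key; Data holds one
-    // row of the x509 table and can be serialized (Bytification) for the cache layer.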
-    public static final int KEYLIMIT = 2;
-    public static class Data extends CacheableData implements Bytification {
-
-        public String     ca;
-        public BigInteger serial;
-        public String     id;
-        public String     x500;
-        public String     x509;
-
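-        // Invalidate only the cache segment that this row's key (ca, serial) maps into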
-        @Override
-        public int[] invalidate(Cached<?,?> cache) {
-            return new int[] {
-                seg(cache,ca,serial)
-            };
-        }
-
-        @Override
-        public ByteBuffer bytify() throws IOException {
-            ByteArrayOutputStream baos = new ByteArrayOutputStream();
-            CertLoader.deflt.marshal(this,new DataOutputStream(baos));
-            return ByteBuffer.wrap(baos.toByteArray());
-        }
-
-        @Override
-        public void reconstitute(ByteBuffer bb) throws IOException {
-            CertLoader.deflt.unmarshal(this, toDIS(bb));
-        }
-    }
-
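-    // CertLoader converts between driver/persisted forms and Data: load/key/body bind
-    // CQL rows and statement parameters; marshal/unmarshal handle the binary cache format.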
-    private static class CertLoader extends Loader<Data> implements Streamer<Data> {
-        public static final int MAGIC=85102934;
-        public static final int VERSION=1;
-        public static final int BUFF_SIZE=48; // working buffer handed to readString during unmarshal
-
-        public static final CertLoader deflt = new CertLoader(KEYLIMIT);
-
-        public CertLoader(int keylimit) {
-            super(keylimit);
-        }
-
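-        // load() reads columns by index: 0=ca, 1=serial (raw bytes), 2=id, 3=x500, 4=x509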
-        @Override
-        public Data load(Data data, Row row) {
-            data.ca = row.getString(0);
-            ByteBuffer bb = row.getBytesUnsafe(1);
-            byte[] bytes = new byte[bb.remaining()];
-            bb.get(bytes);
-            data.serial = new BigInteger(bytes);
-            data.id = row.getString(2);
-            data.x500 = row.getString(3);
-            data.x509 = row.getString(4);
-            return data;
-        }
-
-        @Override
-        protected void key(Data data, int idx, Object[] obj) {
-            obj[idx] = data.ca;
-            obj[++idx] = ByteBuffer.wrap(data.serial.toByteArray());
-        }
-
-        @Override
-        protected void body(Data data, int _idx, Object[] obj) {
-            int idx = _idx;
-
-            obj[idx] = data.id;
-            obj[++idx] = data.x500;
-            obj[++idx] = data.x509;
-        }
-
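-        // Cache serialization layout: header(MAGIC,VERSION), then id, x500, x509, ca via writeString,
-        // then the serial as an int length followed by its raw BigInteger bytes (-1 marks null)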
-        @Override
-        public void marshal(Data data, DataOutputStream os) throws IOException {
-            writeHeader(os,MAGIC,VERSION);
-            writeString(os, data.id);
-            writeString(os, data.x500);
-            writeString(os, data.x509);
-            writeString(os, data.ca);
-            if(data.serial==null) {
-                os.writeInt(-1);
-            } else {
-                byte[] dsba = data.serial.toByteArray();
-                int l = dsba.length;
-                os.writeInt(l);
-                os.write(dsba,0,l);
-            }
-        }
-
-        @Override
-        public void unmarshal(Data data, DataInputStream is) throws IOException {
-            /*int version = */readHeader(is,MAGIC,VERSION);
-            // If Version changes between production runs, switch on it here and read the older layouts explicitly
-            byte[] buff = new byte[BUFF_SIZE];
-            data.id = readString(is,buff);
-            data.x500 = readString(is,buff);
-            data.x509 = readString(is,buff);
-            data.ca = readString(is,buff);
-            int i = is.readInt();
-            if(i<0) {
-                data.serial=null;
-            } else {
-                byte[] bytes = new byte[i]; // trusting the size is a bit dangerous, but lessened because of all the previous sized data reads
-                is.readFully(bytes); // readFully: a plain read() may return before filling the array
-                data.serial = new BigInteger(bytes);
-            }
-        }
-    }
-
-    public Result<List<CertDAO.Data>> read(AuthzTrans trans, Object ... key) {
-        // Translate BigInteger to Byte array for lookup
-        return super.read(trans, key[0],ByteBuffer.wrap(((BigInteger)key[1]).toByteArray()));
-    }
-
-    private void init(AuthzTrans trans) throws APIException, IOException {
-        // Set up sub-DAOs
-        if(historyDAO==null) {
-            historyDAO = new HistoryDAO(trans,this);
-        }
-        if(infoDAO==null) {
-            infoDAO = new CacheInfoDAO(trans,this);
-        }
-
-        String[] helpers = setCRUD(trans, TABLE, Data.class, CertLoader.deflt);
-
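-        // Secondary lookups: prepared statements for reads by id and by x500, in addition to the (ca, serial) key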
-        psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
-                " WHERE id = ?", CertLoader.deflt,readConsistency);
-
-        psX500 = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
-                " WHERE x500 = ?", CertLoader.deflt,readConsistency);
-    }
-
-    public Result<List<Data>> readX500(AuthzTrans trans, String x500) {
-        return psX500.read(trans, R_TEXT, new Object[]{x500});
-    }
-
-    public Result<List<Data>> readID(AuthzTrans trans, String id) {
-        return psID.read(trans, R_TEXT, new Object[]{id});
-    }
-
-    /**
-     * Log modification statements to History.
-     *
-     * @param trans    the transaction in whose context the change was made
-     * @param modified which CRUD action was done
-     * @param data     entity data that needs a log entry
-     * @param override optional memo (index 0) and subject (index 1); when supplied they replace the message crafted from data
-     */
-    @Override
-    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
-        boolean memo = override.length>0 && override[0]!=null;
-        boolean subject = override.length>1 && override[1]!=null;
-
-        HistoryDAO.Data hd = HistoryDAO.newInitedData();
-        hd.user = trans.user();
-        hd.action = modified.name();
-        hd.target = TABLE;
-        hd.subject = subject?override[1]: data.id;
-        hd.memo = memo
-                ? String.format("%s by %s", override[0], hd.user)
-                : (modified.name() + "d certificate info for " + data.id);
-        // On delete, keep a serialized copy so the row can be reconstructed from History
-        if(modified==CRUD.delete) {
-            try {
-                hd.reconstruct = data.bytify();
-            } catch (IOException e) {
-                trans.error().log(e,"Could not serialize CertDAO.Data");
-            }
-        }
-
-        if(historyDAO.create(trans, hd).status!=Status.OK) {
-            trans.error().log("Cannot log to History");
-        }
-        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {
-            trans.error().log("Cannot touch Cert");
-        }
-    }
-}