Update project structure to org.onap.aaf
diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CredDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CredDAO.java
new file mode 100644 (file)
index 0000000..dad5fdb
--- /dev/null
@@ -0,0 +1,258 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * *      http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+package org.onap.aaf.dao.aaf.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.List;
+
+import org.onap.aaf.authz.env.AuthzTrans;
+import org.onap.aaf.authz.layer.Result;
+import org.onap.aaf.dao.Bytification;
+import org.onap.aaf.dao.CIDAO;
+import org.onap.aaf.dao.Cached;
+import org.onap.aaf.dao.CassDAOImpl;
+import org.onap.aaf.dao.Loader;
+import org.onap.aaf.dao.Streamer;
+import org.onap.aaf.inno.env.APIException;
+import org.onap.aaf.inno.env.util.Chrono;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+
+/**
+ * CredDAO manages credentials stored in the "cred" table in Cassandra.
+ * Date: 7/19/13
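+ * <p>
+ * Illustrative usage sketch (hypothetical identifiers; the AuthzTrans, Cluster and keyspace are
+ * assumed to be supplied by the caller):
+ * <pre>
+ *   CredDAO credDAO = new CredDAO(trans, cluster, keyspace);
+ *   Result&lt;List&lt;CredDAO.Data&gt;&gt; rld = credDAO.readID(trans, "someone@people.example.com");
+ *   if (rld.status == Status.OK) {
+ *       // inspect the returned CredDAO.Data entries: type, expires, the cred blob, etc.
+ *   }
+ * </pre>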
+ */
+public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {
+    public static final String TABLE = "cred";
+    public static final int CACHE_SEG = 0x40; // yields segments 0x0-0x3F
+
+    public static final int RAW = -1;
+    public static final int BASIC_AUTH = 1;
+    public static final int BASIC_AUTH_SHA256 = 2;
+    public static final int CERT_SHA256_RSA = 200;
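+    // The constants above are the credential type codes carried in Data.type; their names indicate
+    // how the 'cred' blob is to be interpreted (raw value, BasicAuth hash variants, or a
+    // certificate-based credential).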
+
+    private HistoryDAO historyDAO;
+    private CIDAO<AuthzTrans> infoDAO;
+    private PSInfo psNS;
+    private PSInfo psID;
+
+    public CredDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+        super(trans, CredDAO.class.getSimpleName(), cluster, keyspace, Data.class, TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        init(trans);
+    }
+
+    public CredDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {
+        super(trans, CredDAO.class.getSimpleName(), hDao, Data.class, TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        historyDAO = hDao;
+        infoDAO = ciDao;
+        init(trans);
+    }
+
+    public static final int KEYLIMIT = 3;
+
+    public static class Data extends CacheableData implements Bytification {
+
+        public String     id;
+        public Integer    type;
+        public Date       expires;
+        public Integer    other;
+        public String     ns;
+        public String     notes;
+        public ByteBuffer cred;   // this is a blob in Cassandra
+
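+        // Cache invalidation: report which segment of the CACHE_SEG-way cache holds this id,
+        // so only that slice needs to be flushed when this credential changes.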
+        @Override
+        public int[] invalidate(Cached<?,?> cache) {
+            return new int[] {
+                seg(cache,id) // cache is for all entities
+            };
+        }
+
+        @Override
+        public ByteBuffer bytify() throws IOException {
+            ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            CredLoader.deflt.marshal(this, new DataOutputStream(baos));
+            return ByteBuffer.wrap(baos.toByteArray());
+        }
+
+        @Override
+        public void reconstitute(ByteBuffer bb) throws IOException {
+            CredLoader.deflt.unmarshal(this, toDIS(bb));
+        }
+
+        @Override
+        public String toString() {
+            return id + ' ' + type + ' ' + Chrono.dateTime(expires);
+        }
+    }
+
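+    // CredLoader does double duty: as a Loader it maps Cassandra Rows into Data, and as a Streamer
+    // it (un)marshals Data to the compact binary form used by bytify()/reconstitute() and by the
+    // history 'reconstruct' snapshot written on delete.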
+    private static class CredLoader extends Loader<Data> implements Streamer<Data> {
+        public static final int MAGIC = 153323443;
+        public static final int VERSION = 1;
+        public static final int BUFF_SIZE = 48; // reusable buffer size handed to readString(..)
+
+        public static final CredLoader deflt = new CredLoader(KEYLIMIT);
+
+        public CredLoader(int keylimit) {
+            super(keylimit);
+        }
+
+        @Override
+        public Data load(Data data, Row row) {
+            data.id = row.getString(0);
+            data.type = row.getInt(1);    // NOTE: in the datastax driver, if the int value is NULL, 0 is returned!
+            data.expires = row.getDate(2);
+            data.other = row.getInt(3);
+            data.ns = row.getString(4);
+            data.notes = row.getString(5);
+            data.cred = row.getBytesUnsafe(6);
+            return data;
+        }
+
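+        // Primary-key columns (KEYLIMIT = 3): id, type and expires, in the order the underlying
+        // statements bind them.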
+        @Override
+        protected void key(Data data, int _idx, Object[] obj) {
+            int idx = _idx;
+            obj[idx] = data.id;
+            obj[++idx] = data.type;
+            obj[++idx] = data.expires;
+        }
+
+        @Override
+        protected void body(Data data, int idx, Object[] obj) {
+            int i;
+            obj[i=idx] = data.other;
+            obj[++i] = data.ns;
+            obj[++i] = data.notes;
+            obj[++i] = data.cred;
+        }
+
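+        // Binary layout used by marshal/unmarshal: MAGIC+VERSION header, then id, type,
+        // expires (epoch millis, -1 for null), other, ns, notes, and finally the cred blob
+        // prefixed by its length (-1 meaning null).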
+        @Override
+        public void marshal(Data data, DataOutputStream os) throws IOException {
+            writeHeader(os, MAGIC, VERSION);
+            writeString(os, data.id);
+            os.writeInt(data.type==null?0:data.type);
+            os.writeLong(data.expires==null?-1:data.expires.getTime());
+            os.writeInt(data.other==null?0:data.other);
+            writeString(os, data.ns);
+            writeString(os, data.notes);
+            if(data.cred==null) {
+                os.writeInt(-1);
+            } else {
+                int l = data.cred.remaining();
+                os.writeInt(l);
+                os.write(data.cred.array(), data.cred.arrayOffset()+data.cred.position(), l);
+            }
+        }
+
+        @Override
+        public void unmarshal(Data data, DataInputStream is) throws IOException {
+            /*int version = */readHeader(is, MAGIC, VERSION);
+            // If the VERSION changes between production runs, switch on it here and read the older layouts accordingly
+            byte[] buff = new byte[BUFF_SIZE];
+            data.id = readString(is, buff);
+            data.type = is.readInt();
+
+            long l = is.readLong();
+            data.expires = l<0?null:new Date(l);
+            data.other = is.readInt();
+            data.ns = readString(is, buff);
+            data.notes = readString(is, buff);
+
+            int i = is.readInt();
+            if(i<0) {
+                data.cred = null;
+            } else {
+                byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads
+                is.readFully(bytes); // readFully: a plain read(..) may return before filling the array
+                data.cred = ByteBuffer.wrap(bytes);
+            }
+        }
+    }
+
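+    // init() wires the optional sub-DAOs (creating HistoryDAO/CacheInfoDAO when not injected),
+    // registers the standard CRUD statements for the 'cred' table, and prepares the two
+    // secondary lookups used by readNS(..) and readID(..).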
+    private void init(AuthzTrans trans) throws APIException, IOException {
+        // Set up sub-DAOs
+        if(historyDAO==null) {
+            historyDAO = new HistoryDAO(trans, this);
+        }
+        if(infoDAO==null) {
+            infoDAO = new CacheInfoDAO(trans, this);
+        }
+
+        String[] helpers = setCRUD(trans, TABLE, Data.class, CredLoader.deflt);
+
+        psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+                " WHERE ns = ?", CredLoader.deflt, readConsistency);
+
+        psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+                " WHERE id = ?", CredLoader.deflt, readConsistency);
+    }
+
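+    // Secondary reads: all credentials within a namespace, or every row (all types and
+    // expirations) recorded for a single id.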
+    public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
+        return psNS.read(trans, R_TEXT, new Object[]{ns});
+    }
+
+    public Result<List<Data>> readID(AuthzTrans trans, String id) {
+        return psID.read(trans, R_TEXT, new Object[]{id});
+    }
+
+    /**
+     * Log modification statements to History
+     *
+     * @param modified which CRUD action was done
+     * @param data     entity data that needs a log entry
+     * @param override if a memo (and optionally a subject) is given here, it is used instead of a history message crafted from data
+     */
+    @Override
+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+        boolean memo = override.length>0 && override[0]!=null;
+        boolean subject = override.length>1 && override[1]!=null;
+
+        HistoryDAO.Data hd = HistoryDAO.newInitedData();
+        hd.user = trans.user();
+        hd.action = modified.name();
+        hd.target = TABLE;
+        hd.subject = subject?override[1]:data.id;
+        hd.memo = memo
+                ? String.format("%s by %s", override[0], hd.user)
+                : (modified.name() + "d credential for " + data.id);
+        // Detail?
+        if(modified==CRUD.delete) {
+            try {
+                hd.reconstruct = data.bytify();
+            } catch (IOException e) {
+                trans.error().log(e, "Could not serialize CredDAO.Data");
+            }
+        }
+
+        if(historyDAO.create(trans, hd).status!=Status.OK) {
+            trans.error().log("Cannot log to History");
+        }
+        if(infoDAO.touch(trans, TABLE, data.invalidate(cache)).status!=Status.OK) {
+            trans.error().log("Cannot touch Cred");
+        }
+    }
+}