Update aaf client module
[aaf/authz.git] / authz-cass / src / main / java / com / att / dao / aaf / cass / DelegateDAO.java
diff --git a/authz-cass/src/main/java/com/att/dao/aaf/cass/DelegateDAO.java b/authz-cass/src/main/java/com/att/dao/aaf/cass/DelegateDAO.java
deleted file mode 100644
index 6d6534e..0000000
--- a/authz-cass/src/main/java/com/att/dao/aaf/cass/DelegateDAO.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START====================================================
- * * org.onap.aaf
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * * 
- *  *      http://www.apache.org/licenses/LICENSE-2.0
- * * 
- *  * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-package com.att.dao.aaf.cass;
-
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Date;
-import java.util.List;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.layer.Result;
-import com.att.dao.AbsCassDAO;
-import com.att.dao.Bytification;
-import com.att.dao.CassDAOImpl;
-import com.att.dao.Loader;
-import com.att.dao.Streamer;
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.Row;
-
-public class DelegateDAO extends CassDAOImpl<AuthzTrans, DelegateDAO.Data> {
-
-       public static final String TABLE = "delegate";
-       private PSInfo psByDelegate;
-       
-       public DelegateDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
-               super(trans, DelegateDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
-               init(trans);
-       }
-
-       public DelegateDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
-               super(trans, DelegateDAO.class.getSimpleName(),aDao,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
-               init(trans);
-       }
-       
-       private static final int KEYLIMIT = 1;
-       public static class Data implements Bytification {
-               public String user;
-               public String delegate;
-               public Date expires;
-
-               @Override
-               public ByteBuffer bytify() throws IOException {
-                       ByteArrayOutputStream baos = new ByteArrayOutputStream();
-                       DelegateLoader.dflt.marshal(this,new DataOutputStream(baos));
-                       return ByteBuffer.wrap(baos.toByteArray());
-               }
-               
-               @Override
-               public void reconstitute(ByteBuffer bb) throws IOException {
-                       DelegateLoader.dflt.unmarshal(this, toDIS(bb));
-               }
-       }
-       
-       private static class DelegateLoader extends Loader<Data> implements Streamer<Data>{
-               public static final int MAGIC=0xD823ACF2;
-       public static final int VERSION=1;
-       public static final int BUFF_SIZE=48;
-
-               public static final DelegateLoader dflt = new DelegateLoader(KEYLIMIT);
-
-               public DelegateLoader(int keylimit) {
-                       super(keylimit);
-               }
-               
-               @Override
-               public Data load(Data data, Row row) {
-                       data.user = row.getString(0);
-                       data.delegate = row.getString(1);
-                       data.expires = row.getDate(2);
-                       return data;
-               }
-
-               @Override
-               protected void key(Data data, int idx, Object[] obj) {
-                       obj[idx]=data.user;
-               }
-
-               @Override
-               protected void body(Data data, int _idx, Object[] obj) {
-                       int idx = _idx;
-
-                       obj[idx]=data.delegate;
-                       obj[++idx]=data.expires;
-               }
-
-               @Override
-               public void marshal(Data data, DataOutputStream os) throws IOException {
-                       writeHeader(os,MAGIC,VERSION);
-                       writeString(os, data.user);
-                       writeString(os, data.delegate);
-                       os.writeLong(data.expires.getTime());
-               }
-
-               @Override
-               public void unmarshal(Data data, DataInputStream is) throws IOException {
-                       /*int version = */readHeader(is,MAGIC,VERSION);
-                       // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
-                       byte[] buff = new byte[BUFF_SIZE];
-                       data.user = readString(is, buff);
-                       data.delegate = readString(is,buff);
-                       data.expires = new Date(is.readLong());
-               }
-       }
-       
-       private void init(AuthzTrans trans) {
-               String[] helpers = setCRUD(trans, TABLE, Data.class, DelegateLoader.dflt);
-               psByDelegate = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
-                               " WHERE delegate = ?", new DelegateLoader(1),readConsistency);
-
-       }
-
-       public Result<List<DelegateDAO.Data>> readByDelegate(AuthzTrans trans, String delegate) {
-               return psByDelegate.read(trans, R_TEXT, new Object[]{delegate});
-       }
-}
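
The deleted class was the Cassandra DAO for the "delegate" table, keyed by user, with an extra prepared statement (psByDelegate) for lookups by delegate. Below is a minimal sketch of how a caller might have used it before this removal; only the constructor and readByDelegate signatures come from the file above, while the keyspace name, the Result accessors, and the surrounding setup are assumptions.

import java.util.List;

import com.att.authz.env.AuthzTrans;
import com.att.authz.layer.Result;
import com.att.dao.aaf.cass.DelegateDAO;
import com.datastax.driver.core.Cluster;

public class DelegateLookupSketch {
       // Hypothetical caller: prints every user who has delegated to delegateId.
       // The AuthzTrans and connected Cluster are supplied from elsewhere; how they
       // are built is outside this diff. The "authz" keyspace name is a placeholder.
       public static void printDelegations(AuthzTrans trans, Cluster cluster, String delegateId) {
               DelegateDAO dao = new DelegateDAO(trans, cluster, "authz");
               Result<List<DelegateDAO.Data>> r = dao.readByDelegate(trans, delegateId);
               if (r.isOK()) { // Result.isOK() and .value assumed from com.att.authz.layer.Result
                       for (DelegateDAO.Data d : r.value) {
                               System.out.printf("%s -> %s (expires %s)%n", d.user, d.delegate, d.expires);
                       }
               }
       }
}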