+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>\r
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aaf\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">\r
- <modelVersion>4.0.0</modelVersion>\r
- <parent>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>parent</artifactId>\r
- <version>1.0.1-SNAPSHOT</version>\r
- <relativePath>../pom.xml</relativePath>\r
- </parent>\r
- \r
- <artifactId>authz-cass</artifactId>\r
- <name>Authz Cass</name>\r
- <description>Cassandra DAOs for Authz</description>\r
- <packaging>jar</packaging>\r
- <url>https://github.com/att/AAF</url>\r
-\r
- <developers>\r
- <developer>\r
- <name>Jonathan Gathman</name>\r
- <email></email>\r
- <organization>ATT</organization>\r
- <organizationUrl></organizationUrl>\r
- </developer>\r
- </developers>\r
- <properties>\r
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\r
- <project.cadiVersion>1.0.0-SNAPSHOT</project.cadiVersion>\r
- \r
- <!-- SONAR -->\r
- <jacoco.version>0.7.7.201606060606</jacoco.version>\r
- <sonar.skip>true</sonar.skip>\r
- <sonar-jacoco-listeners.version>3.2</sonar-jacoco-listeners.version>\r
- <sonar.core.codeCoveragePlugin>jacoco</sonar.core.codeCoveragePlugin>\r
- <!-- Default Sonar configuration -->\r
- <sonar.jacoco.reportPath>target/code-coverage/jacoco-ut.exec</sonar.jacoco.reportPath>\r
- <sonar.jacoco.itReportPath>target/code-coverage/jacoco-it.exec</sonar.jacoco.itReportPath>\r
- <!-- Note: This list should match jacoco-maven-plugin's exclusion list below -->\r
- <sonar.exclusions>**/gen/**,**/generated-sources/**,**/yang-gen**,**/pax/**</sonar.exclusions>\r
- <nexusproxy>https://nexus.onap.org</nexusproxy>\r
- <snapshotNexusPath>/content/repositories/snapshots/</snapshotNexusPath>\r
- <releaseNexusPath>/content/repositories/releases/</releaseNexusPath>\r
- <stagingNexusPath>/content/repositories/staging/</stagingNexusPath>\r
- <sitePath>/content/sites/site/org/onap/aaf/authz/${project.artifactId}/${project.version}</sitePath>\r
- </properties>\r
- <dependencies>\r
- <dependency>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>authz-core</artifactId>\r
- <version>${project.version}</version>\r
- </dependency>\r
-\r
- <dependency>\r
- <groupId>org.onap.aaf.cadi</groupId>\r
- <artifactId>cadi-aaf</artifactId>\r
- <version>${project.cadiVersion}</version>\r
- </dependency>\r
-\r
- <dependency>\r
- <groupId>com.datastax.cassandra</groupId>\r
- <artifactId>cassandra-driver-core</artifactId>\r
- <version>2.1.10</version>\r
- </dependency> \r
- \r
- <!-- Cassandra prefers Snappy and LZ4 libs for performance -->\r
- <dependency>\r
- <groupId>org.xerial.snappy</groupId>\r
- <artifactId>snappy-java</artifactId>\r
- <version>1.1.1-M1</version>\r
- </dependency>\r
- \r
- <dependency>\r
- <groupId>net.jpountz.lz4</groupId>\r
- <artifactId>lz4</artifactId>\r
- <version>1.2.0</version>\r
- </dependency>\r
- \r
- <dependency>\r
- <groupId>com.googlecode.jcsv</groupId>\r
- <artifactId>jcsv</artifactId>\r
- <version>1.4.0</version>\r
- </dependency>\r
- \r
- <dependency>\r
- <groupId>org.slf4j</groupId>\r
- <artifactId>slf4j-log4j12</artifactId>\r
- <scope>test</scope>\r
- </dependency>\r
- \r
- \r
- </dependencies>\r
- <build>\r
- <plugins>\r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-jarsigner-plugin</artifactId>\r
- </plugin>\r
- \r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-javadoc-plugin</artifactId>\r
- <version>2.10.4</version>\r
- <configuration>\r
- <failOnError>false</failOnError>\r
- </configuration>\r
- <executions>\r
- <execution>\r
- <id>attach-javadocs</id>\r
- <goals>\r
- <goal>jar</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin> \r
- \r
- \r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-source-plugin</artifactId>\r
- <version>2.2.1</version>\r
- <executions>\r
- <execution>\r
- <id>attach-sources</id>\r
- <goals>\r
- <goal>jar-no-fork</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin>\r
-<plugin>\r
- <groupId>org.sonatype.plugins</groupId>\r
- <artifactId>nexus-staging-maven-plugin</artifactId>\r
- <version>1.6.7</version>\r
- <extensions>true</extensions>\r
- <configuration>\r
- <nexusUrl>${nexusproxy}</nexusUrl>\r
- <stagingProfileId>176c31dfe190a</stagingProfileId>\r
- <serverId>ecomp-staging</serverId>\r
- </configuration>\r
- </plugin> \r
- <plugin>\r
- <groupId>org.jacoco</groupId>\r
- <artifactId>jacoco-maven-plugin</artifactId>\r
- <version>${jacoco.version}</version>\r
- <configuration>\r
- <excludes>\r
- <exclude>**/gen/**</exclude>\r
- <exclude>**/generated-sources/**</exclude>\r
- <exclude>**/yang-gen/**</exclude>\r
- <exclude>**/pax/**</exclude>\r
- </excludes>\r
- </configuration>\r
- <executions>\r
-\r
- <execution>\r
- <id>pre-unit-test</id>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/code-coverage/jacoco-ut.exec</destFile>\r
- <propertyName>surefireArgLine</propertyName>\r
- </configuration>\r
- </execution>\r
- \r
- \r
- <execution>\r
- <id>post-unit-test</id>\r
- <phase>test</phase>\r
- <goals>\r
- <goal>report</goal>\r
- </goals>\r
- <configuration>\r
- <dataFile>${project.build.directory}/code-coverage/jacoco-ut.exec</dataFile>\r
- <outputDirectory>${project.reporting.outputDirectory}/jacoco-ut</outputDirectory>\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <id>pre-integration-test</id>\r
- <phase>pre-integration-test</phase>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/code-coverage/jacoco-it.exec</destFile>\r
-\r
- <propertyName>failsafeArgLine</propertyName>\r
- </configuration>\r
- </execution>\r
-\r
- \r
- <execution>\r
- <id>post-integration-test</id>\r
- <phase>post-integration-test</phase>\r
- <goals>\r
- <goal>report</goal>\r
- </goals>\r
- <configuration>\r
- <dataFile>${project.build.directory}/code-coverage/jacoco-it.exec</dataFile>\r
- <outputDirectory>${project.reporting.outputDirectory}/jacoco-it</outputDirectory>\r
- </configuration>\r
- </execution>\r
- </executions>\r
- </plugin> \r
-\r
- </plugins>\r
- </build>\r
- <distributionManagement>\r
- <repository>\r
- <id>ecomp-releases</id>\r
- <name>AAF Release Repository</name>\r
- <url>${nexusproxy}${releaseNexusPath}</url>\r
- </repository>\r
- <snapshotRepository>\r
- <id>ecomp-snapshots</id>\r
- <name>AAF Snapshot Repository</name>\r
- <url>${nexusproxy}${snapshotNexusPath}</url>\r
- </snapshotRepository>\r
- <site>\r
- <id>ecomp-site</id>\r
- <url>dav:${nexusproxy}${sitePath}</url>\r
- </site>\r
- </distributionManagement>\r
- \r
-</project>\r
-\r
+++ /dev/null
-//
-// Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
-//
-USE authz;
-
-// Create Root pass
-INSERT INTO cred (id,ns,type,cred,expires)
- VALUES ('dgl@openecomp.org','org.openecomp',1,0xab3831f27b39d7a039f9a92aa2bbfe51,'2020-12-31');
-
-// Create 'com' root NS
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('com',1,'Root Namespace',null,1);
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('com','admin',{'com.access|*|*'},'Com Admins');
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('com','owner',{'com.access|*|read'},'Com Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('com','access','*','read',{'com.owner'},'Com Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('com','access','*','*',{'com.admin'},'Com Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.owner','2020-12-31','com','owner');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.admin','2020-12-31','com','admin');
-
-// Create org root NS
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('org',1,'Root Namespace Org',null,1);
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('org','admin',{'org.access|*|*'},'Com Admins');
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('org','owner',{'org.access|*|read'},'Com Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org','access','*','read',{'org.owner'},'Com Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org','access','*','*',{'org.admin'},'Com Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','org.owner','2020-12-31','org','owner');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','org.admin','2020-12-31','org','admin');
-
-
-// Create com.att
-
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('com.att',2,'AT&T Namespace','com',2);
-
-INSERT INTO role(ns, name, perms,description)
- VALUES('com.att','admin',{'com.att.access|*|*'},'AT&T Admins');
-
-INSERT INTO role(ns, name, perms,description)
- VALUES('com.att','owner',{'com.att.access|*|read'},'AT&T Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles,description)
- VALUES ('com.att','access','*','read',{'com.att.owner'},'AT&T Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles,description)
- VALUES ('com.att','access','*','*',{'com.att.admin'},'AT&T Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.att.owner','2020-12-31','com.att','owner');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.att.admin','2020-12-31','com.att','admin');
-
-// Create com.att.aaf
-
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('com.att.aaf',3,'Application Authorization Framework','com.att',3);
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('com.att.aaf','admin',{'com.att.aaf.access|*|*'},'AAF Admins');
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('com.att.aaf','owner',{'com.att.aaf.access|*|read'},'AAF Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('com.att.aaf','access','*','read',{'com.att.aaf.owner'},'AAF Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('com.att.aaf','access','*','*',{'com.att.aaf.admin'},'AAF Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.att.aaf.admin','2020-12-31','com.att.aaf','admin');
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.att.aaf.owner','2020-12-31','com.att.aaf','owner');
-
-
-// Create org.openecomp
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('org.openecomp',2,'Open EComp NS','com.att',2);
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('org.openecomp','admin',{'org.openecomp.access|*|*'},'OpenEcomp Admins');
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('org.openecomp','owner',{'org.openecomp.access|*|read'},'OpenEcomp Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org.openecomp','access','*','read',{'org.openecomp.owner'},'OpenEcomp Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org.openecomp','access','*','*',{'org.openecomp.admin'},'OpenEcomp Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','org.openecomp.admin','2020-12-31','org.openecomp','admin');
+++ /dev/null
-//
-// Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
-//
-// For Developer Machine single instance
-//
- CREATE KEYSPACE authz
- WITH REPLICATION = {'class' : 'SimpleStrategy','replication_factor':1};
-
-USE authz;
-
-//
-// CORE Table function
-//
-
-// Namespace - establish hierarchical authority to modify
-// Permissions and Roles
-// "scope" is flag to determine Policy. Typical important scope
-// is "company" (1)
-CREATE TABLE ns (
- name varchar,
- scope int, // deprecated 2.0.11
- description varchar,
- parent varchar,
- type int,
- PRIMARY KEY (name)
-);
-CREATE INDEX ns_parent on ns(parent);
-
-
-CREATE TABLE ns_attrib (
- ns varchar,
- key varchar,
- value varchar,
- PRIMARY KEY (ns,key)
-);
-create index ns_attrib_key on ns_attrib(key);
-
-// Will be cached
-CREATE TABLE role (
- ns varchar,
- name varchar,
- perms set<varchar>, // Use "Key" of "name|type|action"
- description varchar,
- PRIMARY KEY (ns,name)
-);
-CREATE INDEX role_name ON role(name);
-
-// Will be cached
-CREATE TABLE perm (
- ns varchar,
- type varchar,
- instance varchar,
- action varchar,
- roles set<varchar>, // Need to find Roles given Permissions
- description varchar,
- PRIMARY KEY (ns,type,instance,action)
-);
-
-// This table is user for Authorization
-CREATE TABLE user_role (
- user varchar,
- role varchar, // deprecated: change to ns/rname after 2.0.11
- ns varchar,
- rname varchar,
- expires timestamp,
- PRIMARY KEY(user,role)
- );
-CREATE INDEX user_role_ns ON user_role(ns);
-CREATE INDEX user_role_role ON user_role(role);
-
-// This table is only for the case where return User Credential (MechID) Authentication
-CREATE TABLE cred (
- id varchar,
- type int,
- expires timestamp,
- ns varchar,
- other int,
- notes varchar,
- cred blob,
- prev blob,
- PRIMARY KEY (id,type,expires)
- );
-CREATE INDEX cred_ns ON cred(ns);
-
-// Certificate Cross Table
-// coordinated with CRED type 2
-CREATE TABLE cert (
- fingerprint blob,
- id varchar,
- x500 varchar,
- expires timestamp,
- PRIMARY KEY (fingerprint)
- );
-CREATE INDEX cert_id ON cert(id);
-CREATE INDEX cert_x500 ON cert(x500);
-
-CREATE TABLE notify (
- user text,
- type int,
- last timestamp,
- checksum int,
- PRIMARY KEY (user,type)
-);
-
-CREATE TABLE x509 (
- ca text,
- serial blob,
- id text,
- x500 text,
- x509 text,
- PRIMARY KEY (ca,serial)
-);
-
-
-CREATE INDEX x509_id ON x509 (id);
-CREATE INDEX x509_x500 ON x509 (x500);
-
-//
-// Deployment Artifact (for Certman)
-//
-CREATE TABLE artifact (
- mechid text,
- machine text,
- type Set<text>,
- sponsor text,
- ca text,
- dir text,
- appName text,
- os_user text,
- notify text,
- expires timestamp,
- renewDays int,
- PRIMARY KEY (mechid,machine)
-);
-CREATE INDEX artifact_machine ON artifact(machine);
-
-//
-// Non-Critical Table functions
-//
-// Table Info - for Caching
-CREATE TABLE cache (
- name varchar,
- seg int, // cache Segment
- touched timestamp,
- PRIMARY KEY(name,seg)
-);
-
-CREATE TABLE history (
- id timeuuid,
- yr_mon int,
- user varchar,
- action varchar,
- target varchar, // user, user_role,
- subject varchar, // field for searching main portion of target key
- memo varchar, //description of the action
- reconstruct blob, //serialized form of the target
- // detail Map<varchar, varchar>, // additional information
- PRIMARY KEY (id)
-);
-CREATE INDEX history_yr_mon ON history(yr_mon);
-CREATE INDEX history_user ON history(user);
-CREATE INDEX history_subject ON history(subject);
-
-//
-// A place to hold objects to be created at a future time.
-//
-CREATE TABLE future (
- id uuid, // uniquify
- target varchar, // Target Table
- memo varchar, // Description
- start timestamp, // When it should take effect
- expires timestamp, // When not longer valid
- construct blob, // How to construct this object (like History)
- PRIMARY KEY(id)
-);
-CREATE INDEX future_idx ON future(target);
-CREATE INDEX future_start_idx ON future(start);
-
-
-CREATE TABLE approval (
- id timeuuid, // unique Key
- ticket uuid, // Link to Future Record
- user varchar, // the user who needs to be approved
- approver varchar, // user approving
- type varchar, // approver types i.e. Supervisor, Owner
- status varchar, // approval status. pending, approved, denied
- memo varchar, // Text for Approval to know what's going on
- operation varchar, // List operation to perform
- PRIMARY KEY(id)
- );
-CREATE INDEX appr_approver_idx ON approval(approver);
-CREATE INDEX appr_user_idx ON approval(user);
-CREATE INDEX appr_ticket_idx ON approval(ticket);
-CREATE INDEX appr_status_idx ON approval(status);
-
-CREATE TABLE delegate (
- user varchar,
- delegate varchar,
- expires timestamp,
- PRIMARY KEY (user)
-);
-CREATE INDEX delg_delg_idx ON delegate(delegate);
-
-//
-// Used by authz-batch processes to ensure only 1 runs at a time
-//
-CREATE TABLE run_lock (
- class text,
- host text,
- start timestamp,
- PRIMARY KEY ((class))
-);
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.io.IOException;\r
-import java.util.ArrayList;\r
-import java.util.Deque;\r
-import java.util.List;\r
-import java.util.concurrent.ConcurrentLinkedDeque;\r
-\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.Slot;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-import org.onap.aaf.inno.env.TransStore;\r
-import com.datastax.driver.core.BoundStatement;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.ConsistencyLevel;\r
-import com.datastax.driver.core.ResultSet;\r
-import com.datastax.driver.core.ResultSetFuture;\r
-import com.datastax.driver.core.Row;\r
-import com.datastax.driver.core.Session;\r
-import com.datastax.driver.core.exceptions.DriverException;\r
-\r
-public abstract class AbsCassDAO<TRANS extends TransStore,DATA> {\r
- protected static final char DOT = '.';\r
- protected static final char DOT_PLUS_ONE = '.'+1;\r
- protected static final String FIRST_CHAR = Character.toString((char)0);\r
- protected static final String LAST_CHAR = Character.toString((char)Character.MAX_VALUE);\r
- protected static final int FIELD_COMMAS = 0;\r
- protected static final int QUESTION_COMMAS = 1;\r
- protected static final int ASSIGNMENT_COMMAS = 2;\r
- protected static final int WHERE_ANDS = 3;\r
- \r
- private Cluster cluster; \r
- private Session session;\r
- private final String keyspace;\r
- // If this is null, then we own session\r
- private final AbsCassDAO<TRANS,?> owningDAO;\r
- protected Class<DATA> dataClass;\r
- private final String name;\r
- private static Slot sessionSlot;\r
- //private static final ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo> psinfos = new ArrayList<AbsCassDAO<TransStore,?>.PSInfo>();\r
- private static final ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo> psinfos = new ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo>();\r
- private static final List<Object> EMPTY = new ArrayList<Object>(0);\r
- private static final Deque<ResetRequest> resetDeque = new ConcurrentLinkedDeque<ResetRequest>();\r
- private static boolean resetTrigger = false;\r
- private static long nextAvailableReset = 0;\r
- \r
-\r
- public AbsCassDAO(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass) {\r
- this.name = name;\r
- this.cluster = cluster;\r
- this.keyspace = keyspace;\r
- owningDAO = null; // we own session\r
- session = null;\r
- this.dataClass = dataClass;\r
- \r
- }\r
-\r
- public AbsCassDAO(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass) { \r
- this.name = name;\r
- cluster = aDao.cluster;\r
- keyspace = aDao.keyspace;\r
- session = null;\r
- owningDAO = aDao; // We do not own session\r
- this.dataClass = dataClass;\r
- }\r
- \r
- public static void setSessionSlot(Slot slot) {\r
- sessionSlot = slot;\r
- }\r
-\r
- //Note: Lower case ON PURPOSE. These names used to create History Messages\r
- public enum CRUD {\r
- create,read,update,delete\r
- ;\r
-\r
-}\r
-\r
- public class PSInfo {\r
- private BoundStatement ps;\r
- private final int size;\r
- private final Loader<DATA> loader;\r
- private final CRUD crud; // Store CRUD, because it makes a difference in Object Order, see Loader\r
- private final String cql;\r
- private final ConsistencyLevel consistency;\r
-\r
-\r
- /**\r
- * Create a PSInfo and create Prepared Statement\r
- * \r
- * @param trans\r
- * @param theCQL\r
- * @param loader\r
- */\r
- public PSInfo(TRANS trans, String theCQL, Loader<DATA> loader, ConsistencyLevel consistency) {\r
- this.loader = loader;\r
- this.consistency=consistency;\r
- psinfos.add(this);\r
-\r
- cql = theCQL.trim().toUpperCase();\r
- if(cql.startsWith("INSERT")) {\r
- crud = CRUD.create;\r
- } else if(cql.startsWith("UPDATE")) {\r
- crud = CRUD.update;\r
- } else if(cql.startsWith("DELETE")) {\r
- crud = CRUD.delete;\r
- } else {\r
- crud = CRUD.read;\r
- }\r
- \r
- int idx = 0, count=0;\r
- while((idx=cql.indexOf('?',idx))>=0) {\r
- ++idx;\r
- ++count;\r
- }\r
- size=count;\r
- }\r
- \r
- public synchronized void reset() {\r
- ps = null;\r
- }\r
- \r
- private BoundStatement ps(TransStore trans) throws APIException, IOException {\r
- if(ps==null) {\r
- synchronized(this) {\r
- if(ps==null) {\r
- TimeTaken tt = trans.start("Preparing PSInfo " + crud.toString().toUpperCase() + " on " + name,Env.SUB);\r
- try {\r
- ps = new BoundStatement(getSession(trans).prepare(cql));\r
- ps.setConsistencyLevel(consistency);\r
- } catch (DriverException e) {\r
- reportPerhapsReset(trans,e);\r
- throw e;\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- }\r
- }\r
- return ps;\r
- }\r
-\r
- /**\r
- * Execute a Prepared Statement by extracting from DATA object\r
- * \r
- * @param trans\r
- * @param text\r
- * @param data\r
- * @return\r
- */\r
- public Result<ResultSetFuture> execAsync(TRANS trans, String text, DATA data) {\r
- TimeTaken tt = trans.start(text, Env.REMOTE);\r
- try {\r
- return Result.ok(getSession(trans).executeAsync(\r
- ps(trans).bind(loader.extract(data, size, crud))));\r
- } catch (DriverException | APIException | IOException e) {\r
- AbsCassDAO.this.reportPerhapsReset(trans,e);\r
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /**\r
- * Execute a Prepared Statement on Object[] key\r
- * \r
- * @param trans\r
- * @param text\r
- * @param objs\r
- * @return\r
- */\r
- public Result<ResultSetFuture> execAsync(TRANS trans, String text, Object ... objs) {\r
- TimeTaken tt = trans.start(text, Env.REMOTE);\r
- try {\r
- return Result.ok(getSession(trans).executeAsync(ps(trans).bind(objs)));\r
- } catch (DriverException | APIException | IOException e) {\r
- AbsCassDAO.this.reportPerhapsReset(trans,e);\r
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- /* \r
- * Note:\r
- * \r
- */\r
-\r
- /**\r
- * Execute a Prepared Statement by extracting from DATA object\r
- * \r
- * @param trans\r
- * @param text\r
- * @param data\r
- * @return\r
- */\r
- public Result<ResultSet> exec(TRANS trans, String text, DATA data) {\r
- TimeTaken tt = trans.start(text, Env.REMOTE);\r
- try {\r
- /*\r
- * "execute" (and executeAsync)\r
- * Executes the provided query.\r
- This method blocks until at least some result has been received from the database. However, \r
- for SELECT queries, it does not guarantee that the result has been received in full. But it \r
- does guarantee that some response has been received from the database, and in particular \r
- guarantee that if the request is invalid, an exception will be thrown by this method.\r
-\r
- Parameters:\r
- statement - the CQL query to execute (that can be any Statement).\r
- Returns:\r
- the result of the query. That result will never be null but can be empty (and will \r
- be for any non SELECT query).\r
- */\r
- return Result.ok(getSession(trans).execute(\r
- ps(trans).bind(loader.extract(data, size, crud))));\r
- } catch (DriverException | APIException | IOException e) {\r
- AbsCassDAO.this.reportPerhapsReset(trans,e);\r
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /**\r
- * Execute a Prepared Statement on Object[] key\r
- * \r
- * @param trans\r
- * @param text\r
- * @param objs\r
- * @return\r
- */\r
- public Result<ResultSet> exec(TRANS trans, String text, Object ... objs) {\r
- TimeTaken tt = trans.start(text, Env.REMOTE);\r
- try {\r
- return Result.ok(getSession(trans).execute(ps(trans).bind(objs)));\r
- } catch (DriverException | APIException | IOException e) {\r
- AbsCassDAO.this.reportPerhapsReset(trans,e);\r
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /**\r
- * Read the Data from Cassandra given a Prepared Statement (defined by the\r
- * DAO Instance)\r
- *\r
- * This is common behavior among all DAOs.\r
- * @throws DAOException\r
- */\r
- public Result<List<DATA>> read(TRANS trans, String text, Object[] key) {\r
- TimeTaken tt = trans.start(text,Env.REMOTE);\r
- \r
- ResultSet rs;\r
- try {\r
- rs = getSession(trans).execute(key==null?ps(trans):ps(trans).bind(key));\r
-/// TEST CODE for Exception \r
-// boolean force = true; \r
-// if(force) {\r
-// Map<InetSocketAddress, Throwable> misa = new HashMap<InetSocketAddress,Throwable>();\r
-// //misa.put(new InetSocketAddress(444),new Exception("no host was tried"));\r
-// misa.put(new InetSocketAddress(444),new Exception("Connection has been closed"));\r
-// throw new com.datastax.driver.core.exceptions.NoHostAvailableException(misa);\r
-//// throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"no host was tried");\r
-// }\r
-//// END TEST CODE\r
- } catch (DriverException | APIException | IOException e) {\r
- AbsCassDAO.this.reportPerhapsReset(trans,e);\r
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- return extract(loader,rs,null /*let Array be created if necessary*/,dflt);\r
- }\r
- \r
- public Result<List<DATA>> read(TRANS trans, String text, DATA data) {\r
- return read(trans,text, loader.extract(data, size, crud));\r
- }\r
- \r
- public Object[] keyFrom(DATA data) {\r
- return loader.extract(data, size, CRUD.delete); // Delete is key only\r
- }\r
-\r
- /*\r
- * Note: in case PSInfos are deleted, we want to remove them from list. This is not expected, \r
- * but we don't want a data leak if it does. Finalize doesn't have to happen quickly\r
- */\r
- @Override\r
- protected void finalize() throws Throwable {\r
- psinfos.remove(this);\r
- }\r
- }\r
-\r
- protected final Accept<DATA> dflt = new Accept<DATA>() {\r
- @Override\r
- public boolean ok(DATA data) {\r
- return true;\r
- }\r
- };\r
-\r
-\r
- @SuppressWarnings("unchecked")\r
- protected final Result<List<DATA>> extract(Loader<DATA> loader, ResultSet rs, List<DATA> indata, Accept<DATA> accept) {\r
- List<Row> rows = rs.all();\r
- if(rows.isEmpty()) {\r
- return Result.ok((List<DATA>)EMPTY); // Result sets now .emptyList(true);\r
- } else {\r
- DATA d;\r
- List<DATA> data = indata==null?new ArrayList<DATA>(rows.size()):indata;\r
- \r
- for(Row row : rows) {\r
- try {\r
- d = loader.load(dataClass.newInstance(),row);\r
- if(accept.ok(d)) {\r
- data.add(d);\r
- }\r
- } catch(Exception e) {\r
- return Result.err(e);\r
- }\r
- }\r
- return Result.ok(data);\r
- }\r
- }\r
- \r
- private static final String NEW_CASSANDRA_SESSION_CREATED = "New Cassandra Session Created";\r
- private static final String NEW_CASSANDRA_CLUSTER_OBJECT_CREATED = "New Cassandra Cluster Object Created";\r
- private static final String NEW_CASSANDRA_SESSION = "New Cassandra Session";\r
-\r
- private static class ResetRequest {\r
- //package on purpose\r
- Session session;\r
- long timestamp;\r
- \r
- public ResetRequest(Session session) {\r
- this.session = session;\r
- timestamp = System.currentTimeMillis();\r
- }\r
- }\r
-\r
- \r
- public static final void primePSIs(TransStore trans) throws APIException, IOException {\r
- for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {\r
- if(psi.ps==null) {\r
- psi.ps(trans);\r
- }\r
- }\r
- }\r
- \r
- public final Session getSession(TransStore trans) throws APIException, IOException {\r
- // Try to use Trans' session, if exists\r
- if(sessionSlot!=null) { // try to get from Trans\r
- Session sess = trans.get(sessionSlot, null);\r
- if(sess!=null) {\r
- return sess;\r
- }\r
- }\r
- \r
- // If there's an owning DAO, use it's session\r
- if(owningDAO!=null) {\r
- return owningDAO.getSession(trans);\r
- }\r
- \r
- // OK, nothing else works... get our own.\r
- if(session==null || resetTrigger) {\r
- Cluster tempCluster = null;\r
- Session tempSession = null;\r
- try {\r
- synchronized(NEW_CASSANDRA_SESSION_CREATED) {\r
- boolean reset = false;\r
- for(ResetRequest r : resetDeque) {\r
- if(r.session == session) {\r
- if(r.timestamp>nextAvailableReset) {\r
- reset=true;\r
- nextAvailableReset = System.currentTimeMillis() + 60000;\r
- tempCluster = cluster;\r
- tempSession = session;\r
- break;\r
- } else {\r
- trans.warn().log("Cassandra Connection Reset Ignored: Recent Reset");\r
- }\r
- }\r
- }\r
- \r
- if(reset || session == null) {\r
- TimeTaken tt = trans.start(NEW_CASSANDRA_SESSION, Env.SUB);\r
- try {\r
- // Note: Maitrayee recommended not closing the cluster, just\r
- // overwrite it. 9/30/2016 assuming same for Session\r
- // This was a bad idea. Ran out of File Handles as I suspected..\r
- if(reset) {\r
- for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {\r
- psi.reset();\r
- }\r
- }\r
- if(reset || cluster==null) {\r
- cluster = CassAccess.cluster(trans, keyspace);\r
- trans.warn().log(NEW_CASSANDRA_CLUSTER_OBJECT_CREATED);\r
- }\r
- if(reset || session==null) {\r
- session = cluster.connect(keyspace);\r
- trans.warn().log(NEW_CASSANDRA_SESSION_CREATED);\r
- }\r
- } finally {\r
- resetTrigger=false;\r
- tt.done();\r
- }\r
- }\r
- }\r
- } finally {\r
- TimeTaken tt = trans.start("Clear Reset Deque", Env.SUB);\r
- try {\r
- resetDeque.clear();\r
- // Not clearing Session/Cluster appears to kill off FileHandles\r
- if(tempSession!=null && !tempSession.isClosed()) {\r
- tempSession.close();\r
- }\r
- if(tempCluster!=null && !tempCluster.isClosed()) {\r
- tempCluster.close();\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- }\r
- return session;\r
- }\r
- \r
- public final boolean reportPerhapsReset(TransStore trans, Exception e) {\r
- if(owningDAO!=null) {\r
- return owningDAO.reportPerhapsReset(trans, e);\r
- } else {\r
- boolean rv = false;\r
- if(CassAccess.isResetException(e)) {\r
- trans.warn().printf("Session Reset called for %s by %s ",session==null?"":session,e==null?"Mgmt Command":e.getClass().getName());\r
- resetDeque.addFirst(new ResetRequest(session));\r
- rv = resetTrigger = true;\r
- } \r
- trans.error().log(e);\r
- return rv;\r
- }\r
- }\r
-\r
- public void close(TransStore trans) {\r
- if(owningDAO==null) {\r
- if(session!=null) {\r
- TimeTaken tt = trans.start("Cassandra Session Close", Env.SUB);\r
- try {\r
- session.close();\r
- } finally {\r
- tt.done();\r
- }\r
- session = null;\r
- } else {\r
- trans.debug().log("close called(), Session already closed");\r
- }\r
- } else {\r
- owningDAO.close(trans);\r
- }\r
- }\r
-\r
- protected void wasModified(TRANS trans, CRUD modified, DATA data, String ... override) {\r
- }\r
- \r
- protected interface Accept<DATA> {\r
- public boolean ok(DATA data);\r
- }\r
-\r
-}\r
-\r
-\r
-\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-\r
-public interface Bytification {\r
- public ByteBuffer bytify() throws IOException;\r
- public void reconstitute(ByteBuffer bb) throws IOException;\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.util.Date;\r
-\r
-import org.onap.aaf.authz.layer.Result;\r
-\r
-import org.onap.aaf.inno.env.Trans;\r
-\r
-public interface CIDAO<TRANS extends Trans> {\r
-\r
- /**\r
- * Touch the date field for given Table\r
- * \r
- * @param trans\r
- * @param name\r
- * @return\r
- */\r
- public abstract Result<Void> touch(TRANS trans, String name, int ... seg);\r
-\r
- /**\r
- * Read all Info entries, and set local Date objects\r
- * \r
- * This is to support regular data checks on the Database to speed up Caching behavior\r
- * \r
- */\r
- public abstract Result<Void> check(TRANS trans);\r
-\r
- public abstract Date get(TRANS trans, String table, int seg);\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-/**\r
- * Interface to obtain Segment Integer from DAO Data\r
- * for use in Caching mechanism\r
- * \r
- * This should typically be obtained by getting the Hash of the key, then using modulus on the size of segment.\r
- * \r
- *\r
- */\r
-public interface Cacheable {\r
- public int[] invalidate(Cached<?,?> cache);\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.util.Date;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Timer;\r
-import java.util.TimerTask;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cache.Cache;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.Trans;\r
-\r
-public class Cached<TRANS extends Trans, DATA extends Cacheable> extends Cache<TRANS,DATA> {\r
- // Java does not allow creation of Arrays with Generics in them...\r
- // private Map<String,Dated> cache[];\r
- protected final CIDAO<TRANS> info;\r
- \r
- private static Timer infoTimer;\r
- private Object cache[];\r
- public final int segSize;\r
-\r
- protected final String name;\r
- \r
-\r
-\r
- // Taken from String Hash, but coded, to ensure consistent across Java versions. Also covers negative case;\r
- public int cacheIdx(String key) {\r
- int h = 0;\r
- for (int i = 0; i < key.length(); i++) {\r
- h = 31*h + key.charAt(i);\r
- }\r
- if(h<0)h*=-1;\r
- return h%segSize;\r
- }\r
- \r
- public Cached(CIDAO<TRANS> info, String name, int segSize) {\r
- this.name =name;\r
- this.segSize = segSize;\r
- this.info = info;\r
- cache = new Object[segSize];\r
- // Create a new Map for each Segment, and store locally\r
- for(int i=0;i<segSize;++i) {\r
- cache[i]=obtain(name+i);\r
- }\r
- }\r
- \r
- public void add(String key, List<DATA> data) {\r
- @SuppressWarnings("unchecked")\r
- Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx(key)]);\r
- map.put(key, new Dated(data));\r
- }\r
-\r
-\r
- public int invalidate(String key) {\r
- int cacheIdx = cacheIdx(key);\r
- @SuppressWarnings("unchecked")\r
- Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx]);\r
-// if(map.remove(key)!=null) // Not seeming to remove all the time\r
- if(map!=null)map.clear();\r
-// System.err.println("Remove " + name + " " + key);\r
- return cacheIdx;\r
- }\r
-\r
- public Result<Void> invalidate(int segment) {\r
- if(segment<0 || segment>=cache.length) return Result.err(Status.ERR_BadData,"Cache Segment %s is out of range",Integer.toString(segment));\r
- @SuppressWarnings("unchecked")\r
- Map<String,Dated> map = ((Map<String,Dated>)cache[segment]);\r
- if(map!=null) {\r
- map.clear();\r
- }\r
- return Result.ok();\r
- }\r
-\r
- protected interface Getter<D> {\r
- public abstract Result<List<D>> get();\r
- };\r
- \r
- // TODO utilize Segmented Caches, and fold "get" into "reads"\r
- @SuppressWarnings("unchecked")\r
- public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {\r
- List<DATA> ld = null;\r
- Result<List<DATA>> rld = null;\r
- \r
- int cacheIdx = cacheIdx(key);\r
- Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);\r
- \r
- // Check for saved element in cache\r
- Dated cached = map.get(key);\r
- // Note: These Segment Timestamps are kept up to date with DB\r
- Date dbStamp = info.get(trans, name,cacheIdx);\r
- \r
- // Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)\r
- if(cached!=null && dbStamp.before(cached.timestamp)) {\r
- ld = (List<DATA>)cached.data;\r
- rld = Result.ok(ld);\r
- } else {\r
- rld = getter.get();\r
- if(rld.isOK()) { // only store valid lists\r
- map.put(key, new Dated(rld.value)); // successful item found gets put in cache\r
-// } else if(rld.status == Result.ERR_Backend){\r
-// map.remove(key);\r
- }\r
- }\r
- return rld;\r
- }\r
-\r
- /**\r
- * Each Cached object has multiple Segments that need cleaning. Derive each, and add to Cleansing Thread\r
- * @param env\r
- * @param dao\r
- */\r
- public static void startCleansing(AuthzEnv env, CachedDAO<?,?,?> ... dao) {\r
- for(CachedDAO<?,?,?> d : dao) { \r
- for(int i=0;i<d.segSize;++i) {\r
- startCleansing(env, d.table()+i);\r
- }\r
- }\r
- }\r
-\r
-\r
- public static<T extends Trans> void startRefresh(AuthzEnv env, CIDAO<AuthzTrans> cidao) {\r
- if(infoTimer==null) {\r
- infoTimer = new Timer("CachedDAO Info Refresh Timer");\r
- int minRefresh = 10*1000*60; // 10 mins Integer.parseInt(env.getProperty(CACHE_MIN_REFRESH_INTERVAL,"2000")); // 2 second minimum refresh \r
- infoTimer.schedule(new Refresh(env,cidao, minRefresh), 1000, minRefresh); // note: Refresh from DB immediately\r
- }\r
- }\r
- \r
- public static void stopTimer() {\r
- Cache.stopTimer();\r
- if(infoTimer!=null) {\r
- infoTimer.cancel();\r
- infoTimer = null;\r
- }\r
- }\r
- \r
- private final static class Refresh extends TimerTask {\r
- private static final int maxRefresh = 2*60*10000; // 20 mins\r
- private AuthzEnv env;\r
- private CIDAO<AuthzTrans> cidao;\r
- private int minRefresh;\r
- private long lastRun;\r
- \r
- public Refresh(AuthzEnv env, CIDAO<AuthzTrans> cidao, int minRefresh) {\r
- this.env = env;\r
- this.cidao = cidao;\r
- this.minRefresh = minRefresh;\r
- lastRun = System.currentTimeMillis()-maxRefresh-1000;\r
- }\r
- \r
- @Override\r
- public void run() {\r
- // Evaluate whether to refresh based on transaction rate\r
- long now = System.currentTimeMillis();\r
- long interval = now-lastRun;\r
-\r
- if(interval < minRefresh || interval < Math.min(env.transRate(),maxRefresh)) return;\r
- lastRun = now;\r
- AuthzTrans trans = env.newTransNoAvg();\r
- Result<Void> rv = cidao.check(trans);\r
- if(rv.status!=Result.OK) {\r
- env.error().log("Error in CacheInfo Refresh",rv.details);\r
- }\r
- if(env.debug().isLoggable()) {\r
- StringBuilder sb = new StringBuilder("Cache Info Refresh: ");\r
- trans.auditTrail(0, sb, Env.REMOTE);\r
- env.debug().log(sb);\r
- }\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.util.ArrayList;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-\r
-import org.onap.aaf.inno.env.Trans;\r
-\r
-/**\r
- * CachedDAO\r
- * \r
- * Cache the response of "get" of any DAO. \r
- * \r
- * For simplicity's sake, at this time, we only do this for single Object keys \r
- * \r
- *\r
- * @param <DATA>\r
- */\r
-public class CachedDAO<TRANS extends Trans,D extends DAO<TRANS,DATA>,DATA extends Cacheable> \r
- extends Cached<TRANS,DATA> implements DAO_RO<TRANS,DATA>{\r
-// private final String dirty_str; \r
- \r
- private final D dao;\r
-\r
- public CachedDAO(D dao, CIDAO<TRANS> info, int segsize) {\r
- super(info, dao.table(), segsize);\r
- \r
- // Instantiate a new Cache per DAO name (so separate instances use the same cache) \r
- this.dao = dao;\r
- //read_str = "Cached READ for " + dao.table();\r
-// dirty_str = "Cache DIRTY on " + dao.table();\r
- if(dao instanceof CassDAOImpl) {\r
- ((CassDAOImpl<?,?>)dao).cache = this;\r
- }\r
- }\r
- \r
- public static<T extends Trans, DA extends DAO<T,DT>, DT extends Cacheable> \r
- CachedDAO<T,DA,DT> create(DA dao, CIDAO<T> info, int segsize) {\r
- return new CachedDAO<T,DA,DT>(dao,info, segsize);\r
- }\r
-\r
- public void add(DATA data) {\r
- String key = keyFromObjs(dao.keyFrom(data));\r
- List<DATA> list = new ArrayList<DATA>();\r
- list.add(data);\r
- super.add(key,list);\r
- }\r
- \r
-// public void invalidate(TRANS trans, Object ... objs) {\r
-// TimeTaken tt = trans.start(dirty_str, Env.SUB);\r
-// try {\r
-// super.invalidate(keyFromObjs(objs));\r
-// } finally {\r
-// tt.done();\r
-// }\r
-// }\r
-\r
- public static String keyFromObjs(Object ... objs) {\r
- String key;\r
- if(objs.length==1 && objs[0] instanceof String) {\r
- key = (String)objs[0];\r
- } else {\r
- StringBuilder sb = new StringBuilder();\r
- boolean first = true;\r
- for(Object o : objs) {\r
- if(o!=null) {\r
- if(first) {\r
- first =false;\r
- } else {\r
- sb.append('|');\r
- }\r
- sb.append(o.toString());\r
- }\r
- }\r
- key = sb.toString();\r
- }\r
- return key;\r
- }\r
-\r
- public Result<DATA> create(TRANS trans, DATA data) {\r
- Result<DATA> d = dao.create(trans,data);\r
- if(d.status==Status.OK) {\r
- add(d.value);\r
- } else {\r
- trans.error().log(d.errorString());\r
- }\r
- invalidate(trans,data);\r
- return d;\r
- }\r
-\r
- protected class DAOGetter implements Getter<DATA> {\r
- protected TRANS trans;\r
- protected Object objs[];\r
- protected D dao;\r
- public Result<List<DATA>> result;\r
-\r
- public DAOGetter(TRANS trans, D dao, Object ... objs) {\r
- this.trans = trans;\r
- this.dao = dao;\r
- this.objs = objs;\r
- }\r
- \r
- /**\r
- * Separated into single call for easy overloading\r
- * @return\r
- */\r
- public Result<List<DATA>> call() {\r
- return dao.read(trans, objs);\r
- }\r
- \r
- @Override\r
- public final Result<List<DATA>> get() {\r
- return call();\r
-// if(result.isOKhasData()) { // Note, given above logic, could exist, but stale\r
-// return result.value;\r
-// } else {\r
-// return null;\r
-// }\r
- }\r
- }\r
-\r
- @Override\r
- public Result<List<DATA>> read(final TRANS trans, final Object ... objs) {\r
- DAOGetter getter = new DAOGetter(trans,dao,objs); \r
- return get(trans, keyFromObjs(objs),getter);\r
-// if(ld!=null) {\r
-// return Result.ok(ld);//.emptyList(ld.isEmpty());\r
-// }\r
-// // Result Result if exists\r
-// if(getter.result==null) {\r
-// return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());\r
-// }\r
-// return getter.result;\r
- }\r
-\r
- // Slight Improved performance available when String and Obj versions are known. \r
- public Result<List<DATA>> read(final String key, final TRANS trans, final Object ... objs) {\r
- DAOGetter getter = new DAOGetter(trans,dao,objs); \r
- return get(trans, key, getter);\r
-// if(ld!=null) {\r
-// return Result.ok(ld);//.emptyList(ld.isEmpty());\r
-// }\r
-// // Result Result if exists\r
-// if(getter.result==null) {\r
-// return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());\r
-// }\r
-// return getter.result;\r
- }\r
- \r
- @Override\r
- public Result<List<DATA>> read(TRANS trans, DATA data) {\r
- return read(trans,dao.keyFrom(data));\r
- }\r
- public Result<Void> update(TRANS trans, DATA data) {\r
- Result<Void> d = dao.update(trans, data);\r
- if(d.status==Status.OK) {\r
- add(data);\r
- } else {\r
- trans.error().log(d.errorString());\r
- }\r
- return d;\r
- }\r
-\r
- public Result<Void> delete(TRANS trans, DATA data, boolean reread) {\r
- if(reread) { // If reread, get from Cache, if possible, not DB exclusively\r
- Result<List<DATA>> rd = read(trans,data);\r
- if(rd.notOK()) {\r
- return Result.err(rd);\r
- } else {\r
- trans.error().log(rd.errorString());\r
- }\r
- if(rd.isEmpty()) {\r
- data.invalidate(this);\r
- return Result.err(Status.ERR_NotFound,"Not Found");\r
- }\r
- data = rd.value.get(0);\r
- }\r
- Result<Void> rv=dao.delete(trans, data, false);\r
- data.invalidate(this);\r
- return rv;\r
- }\r
- \r
- @Override\r
- public void close(TRANS trans) {\r
- if(dao!=null) {\r
- dao.close(trans);\r
- }\r
- }\r
- \r
-\r
- @Override\r
- public String table() {\r
- return dao.table();\r
- }\r
- \r
- public D dao() {\r
- return dao;\r
- }\r
- \r
- public void invalidate(TRANS trans, DATA data) {\r
- if(info.touch(trans, dao.table(),data.invalidate(this)).notOK()) {\r
- trans.error().log("Cannot touch CacheInfo for Role");\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.io.IOException;\r
-import java.util.ArrayList;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-\r
-import org.onap.aaf.cadi.routing.GreatCircle;\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.util.Split;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Cluster.Builder;\r
-import com.datastax.driver.core.policies.DCAwareRoundRobinPolicy;\r
-\r
-public class CassAccess {\r
- public static final String KEYSPACE = "authz";\r
- public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";\r
- public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";\r
- public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";\r
- public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";\r
- public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";\r
- public static final String LATITUDE = "LATITUDE";\r
- public static final String LONGITUDE = "LONGITUDE";\r
- private static final List<Resettable> resetExceptions = new ArrayList<Resettable>();\r
- public static final String ERR_ACCESS_MSG = "Accessing Backend";\r
- private static Builder cb = null;\r
-\r
- /**\r
- * To create DCAwareRoundRobing Policy:\r
- * Need Properties\r
- * LATITUDE (or AFT_LATITUDE)\r
- * LONGITUDE (or AFT_LONGITUDE)\r
- * CASSANDRA CLUSTERS with additional information:\r
- * machine:DC:lat:long,machine:DC:lat:long\r
- * @param env\r
- * @param prefix\r
- * @return\r
- * @throws APIException\r
- * @throws IOException\r
- */\r
-\r
- @SuppressWarnings("deprecation")\r
- public static synchronized Cluster cluster(Env env, String prefix) throws APIException, IOException {\r
- if(cb == null) {\r
- String pre;\r
- if(prefix==null) {\r
- pre="";\r
- } else {\r
- env.info().log("Cassandra Connection for ",prefix);\r
- pre = prefix+'.';\r
- }\r
- cb = Cluster.builder();\r
- String str = env.getProperty(pre+CASSANDRA_CLUSTERS_PORT,"9042");\r
- if(str!=null) {\r
- env.init().log("Cass Port = ",str );\r
- cb.withPort(Integer.parseInt(str));\r
- }\r
- str = env.getProperty(pre+CASSANDRA_CLUSTERS_USER_NAME,null);\r
- if(str!=null) {\r
- env.init().log("Cass User = ",str );\r
- String epass = env.getProperty(pre + CASSANDRA_CLUSTERS_PASSWORD,null);\r
- if(epass==null) {\r
- throw new APIException("No Password configured for " + str);\r
- }\r
- //TODO Figure out way to ensure Decryptor setting in AuthzEnv\r
- if(env instanceof AuthzEnv) {\r
- cb.withCredentials(str,((AuthzEnv)env).decrypt(epass,true));\r
- } else {\r
- cb.withCredentials(str, env.decryptor().decrypt(epass));\r
- }\r
- }\r
- \r
- str = env.getProperty(pre+CASSANDRA_RESET_EXCEPTIONS,null);\r
- if(str!=null) {\r
- env.init().log("Cass ResetExceptions = ",str );\r
- for(String ex : Split.split(',', str)) {\r
- resetExceptions.add(new Resettable(env,ex));\r
- }\r
- }\r
- \r
- str = env.getProperty(LATITUDE,env.getProperty("AFT_LATITUDE",null));\r
- Double lat = str!=null?Double.parseDouble(str):null;\r
- str = env.getProperty(LONGITUDE,env.getProperty("AFT_LONGITUDE",null));\r
- Double lon = str!=null?Double.parseDouble(str):null;\r
- if(lat == null || lon == null) {\r
- throw new APIException("LATITUDE(or AFT_LATITUDE) and/or LONGITUDE(or AFT_LATITUDE) are not set");\r
- }\r
- \r
- env.init().printf("Service Latitude,Longitude = %f,%f",lat,lon);\r
- \r
- str = env.getProperty(pre+CASSANDRA_CLUSTERS,"localhost");\r
- env.init().log("Cass Clusters = ",str );\r
- String[] machs = Split.split(',', str);\r
- String[] cpoints = new String[machs.length];\r
- String bestDC = null;\r
- int numInBestDC = 1;\r
- double mlat, mlon,temp,distance = -1.0;\r
- for(int i=0;i<machs.length;++i) {\r
- String[] minfo = Split.split(':',machs[i]);\r
- if(minfo.length>0) {\r
- cpoints[i]=minfo[0];\r
- }\r
- \r
- // Calc closest DC with Great Circle\r
- if(minfo.length>3) {\r
- mlat = Double.parseDouble(minfo[2]);\r
- mlon = Double.parseDouble(minfo[3]);\r
- if((temp=GreatCircle.calc(lat, lon, mlat, mlon)) > distance) {\r
- distance = temp;\r
- if(bestDC!=null && bestDC.equals(minfo[1])) {\r
- ++numInBestDC;\r
- } else {\r
- bestDC = minfo[1];\r
- numInBestDC = 1;\r
- }\r
- } else {\r
- if(bestDC!=null && bestDC.equals(minfo[1])) {\r
- ++numInBestDC;\r
- }\r
- }\r
- }\r
- }\r
- \r
- cb.addContactPoints(cpoints);\r
- \r
- if(bestDC!=null) {\r
- // 8/26/2016 Management has determined that Accuracy is preferred over speed in bad situations\r
- // Local DC Aware Load Balancing appears to have the highest normal performance, with the best\r
- // Degraded Accuracy\r
- cb.withLoadBalancingPolicy(new DCAwareRoundRobinPolicy(\r
- bestDC, numInBestDC, true /*allow LocalDC to look at other DCs for LOCAL_QUORUM */));\r
- env.init().printf("Cassandra configured for DCAwareRoundRobinPolicy at %s with emergency remote of up to %d node(s)"\r
- ,bestDC, numInBestDC);\r
- } else {\r
- env.init().printf("Cassandra is using Default Policy, which is not DC aware");\r
- }\r
- }\r
- return cb.build();\r
- }\r
- \r
- private static class Resettable {\r
- private Class<? extends Exception> cls;\r
- private List<String> messages;\r
- \r
- @SuppressWarnings("unchecked")\r
- public Resettable(Env env, String propData) throws APIException {\r
- if(propData!=null && propData.length()>1) {\r
- String[] split = Split.split(':', propData);\r
- if(split.length>0) {\r
- try {\r
- cls = (Class<? extends Exception>)Class.forName(split[0]);\r
- } catch (ClassNotFoundException e) {\r
- throw new APIException("Declared Cassandra Reset Exception, " + propData + ", cannot be ClassLoaded");\r
- }\r
- }\r
- if(split.length>1) {\r
- messages=new ArrayList<String>();\r
- for(int i=1;i<split.length;++i) {\r
- String str = split[i];\r
- int start = str.startsWith("\"")?1:0;\r
- int end = str.length()-(str.endsWith("\"")?1:0);\r
- messages.add(split[i].substring(start, end));\r
- }\r
- } else {\r
- messages = null;\r
- }\r
- }\r
- }\r
- \r
- public boolean matches(Exception ex) {\r
- if(ex.getClass().equals(cls)) {\r
- if(messages!=null) {\r
- String msg = ex.getMessage();\r
- for(String m : messages) {\r
- if(msg.contains(m)) {\r
- return true;\r
- }\r
- }\r
- }\r
- }\r
- return false;\r
- }\r
- }\r
- \r
- public static final boolean isResetException(Exception e) {\r
- if(e==null) {\r
- return true;\r
- }\r
- for(Resettable re : resetExceptions) {\r
- if(re.matches(e)) {\r
- return true;\r
- }\r
- }\r
- return false;\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.io.ByteArrayInputStream;\r
-import java.io.DataInputStream;\r
-import java.lang.reflect.Field;\r
-import java.nio.ByteBuffer;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-\r
-import org.onap.aaf.inno.env.TransStore;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.ConsistencyLevel;\r
-import com.datastax.driver.core.ResultSet;\r
-import com.datastax.driver.core.ResultSetFuture;\r
-\r
-/**\r
- * AbsCassDAO\r
- *\r
- * Deal with the essentials of Interaction with Cassandra DataStore for all Cassandra DAOs\r
- *\r
- *\r
- * @param <DATA>\r
- */\r
-public class CassDAOImpl<TRANS extends TransStore,DATA> extends AbsCassDAO<TRANS, DATA> implements DAO<TRANS,DATA> {\r
- public static final String USER_NAME = "__USER_NAME__";\r
- protected static final String CREATE_SP = "CREATE ";\r
- protected static final String UPDATE_SP = "UPDATE ";\r
- protected static final String DELETE_SP = "DELETE ";\r
- protected static final String SELECT_SP = "SELECT ";\r
-\r
- protected final String C_TEXT = getClass().getSimpleName() + " CREATE";\r
- protected final String R_TEXT = getClass().getSimpleName() + " READ";\r
- protected final String U_TEXT = getClass().getSimpleName() + " UPDATE";\r
- protected final String D_TEXT = getClass().getSimpleName() + " DELETE";\r
- private String table;\r
- \r
- protected final ConsistencyLevel readConsistency,writeConsistency;\r
- \r
- // Setteable only by CachedDAO\r
- protected Cached<?, ?> cache;\r
-\r
- /**\r
- * A Constructor from the originating Cluster. This DAO will open the Session at need,\r
- * and shutdown the session when "close()" is called.\r
- *\r
- * @param cluster\r
- * @param keyspace\r
- * @param dataClass\r
- */\r
- public CassDAOImpl(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {\r
- super(trans, name, cluster,keyspace,dataClass);\r
- this.table = table;\r
- readConsistency = read;\r
- writeConsistency = write;\r
- }\r
- \r
- /**\r
- * A Constructor to share Session with other DAOs.\r
- *\r
- * This method get the Session and Cluster information from the calling DAO, and won't\r
- * touch the Session on closure.\r
- *\r
- * @param aDao\r
- * @param dataClass\r
- */\r
- public CassDAOImpl(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {\r
- super(trans, name, aDao,dataClass);\r
- this.table = table;\r
- readConsistency = read;\r
- writeConsistency = write;\r
- }\r
-\r
- protected PSInfo createPS;\r
- protected PSInfo readPS;\r
- protected PSInfo updatePS;\r
- protected PSInfo deletePS;\r
- private boolean async=false;\r
-\r
- public void async(boolean bool) {\r
- async = bool;\r
- }\r
-\r
- public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader) {\r
- return setCRUD(trans, table, dc, loader, -1);\r
- }\r
- \r
- public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader, int max) {\r
- Field[] fields = dc.getDeclaredFields();\r
- int end = max>=0 & max<fields.length?max:fields.length;\r
- // get keylimit from a non-null Loader\r
- int keylimit = loader.keylimit();\r
- \r
- StringBuilder sbfc = new StringBuilder();\r
- StringBuilder sbq = new StringBuilder();\r
- StringBuilder sbwc = new StringBuilder();\r
- StringBuilder sbup = new StringBuilder();\r
- \r
- if(keylimit>0) {\r
- for(int i=0;i<end;++i) {\r
- if(i>0) {\r
- sbfc.append(',');\r
- sbq.append(',');\r
- if(i<keylimit) {\r
- sbwc.append(" AND ");\r
- }\r
- }\r
- sbfc.append(fields[i].getName());\r
- sbq.append('?');\r
- if(i>=keylimit) {\r
- if(i>keylimit) {\r
- sbup.append(',');\r
- }\r
- sbup.append(fields[i].getName());\r
- sbup.append("=?");\r
- }\r
- if(i<keylimit) {\r
- sbwc.append(fields[i].getName());\r
- sbwc.append("=?");\r
- }\r
- }\r
- \r
- createPS = new PSInfo(trans, "INSERT INTO " + table + " ("+ sbfc +") VALUES ("+ sbq +");",loader,writeConsistency);\r
- \r
- readPS = new PSInfo(trans, "SELECT " + sbfc + " FROM " + table + " WHERE " + sbwc + ';',loader,readConsistency);\r
- \r
- // Note: UPDATES can't compile if there are no fields besides keys... Use "Insert"\r
- if(sbup.length()==0) {\r
- updatePS = createPS; // the same as an insert\r
- } else {\r
- updatePS = new PSInfo(trans, "UPDATE " + table + " SET " + sbup + " WHERE " + sbwc + ';',loader,writeConsistency);\r
- }\r
- \r
- deletePS = new PSInfo(trans, "DELETE FROM " + table + " WHERE " + sbwc + ';',loader,writeConsistency);\r
- }\r
- return new String[] {sbfc.toString(), sbq.toString(), sbup.toString(), sbwc.toString()};\r
- }\r
-\r
- public void replace(CRUD crud, PSInfo psInfo) {\r
- switch(crud) {\r
- case create: createPS = psInfo; break;\r
- case read: readPS = psInfo; break;\r
- case update: updatePS = psInfo; break;\r
- case delete: deletePS = psInfo; break;\r
- }\r
- }\r
-\r
- public void disable(CRUD crud) {\r
- switch(crud) {\r
- case create: createPS = null; break;\r
- case read: readPS = null; break;\r
- case update: updatePS = null; break;\r
- case delete: deletePS = null; break;\r
- }\r
- }\r
-\r
- \r
- /**\r
- * Given a DATA object, extract the individual elements from the Data into an Object Array for the\r
- * execute element.\r
- */\r
- public Result<DATA> create(TRANS trans, DATA data) {\r
- if(createPS==null) {\r
- Result.err(Result.ERR_NotImplemented,"Create is disabled for %s",getClass().getSimpleName());\r
- }\r
- if(async) /*ResultSetFuture */ {\r
- Result<ResultSetFuture> rs = createPS.execAsync(trans, C_TEXT, data);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- } else {\r
- Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- }\r
- wasModified(trans, CRUD.create, data);\r
- return Result.ok(data);\r
- }\r
-\r
- /**\r
- * Read the Unique Row associated with Full Keys\r
- */\r
- public Result<List<DATA>> read(TRANS trans, DATA data) {\r
- if(readPS==null) {\r
- Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());\r
- }\r
- return readPS.read(trans, R_TEXT, data);\r
- }\r
-\r
- public Result<List<DATA>> read(TRANS trans, Object ... key) {\r
- if(readPS==null) {\r
- Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());\r
- }\r
- return readPS.read(trans, R_TEXT, key);\r
- }\r
-\r
- public Result<Void> update(TRANS trans, DATA data) {\r
- if(updatePS==null) {\r
- Result.err(Result.ERR_NotImplemented,"Update is disabled for %s",getClass().getSimpleName());\r
- }\r
- if(async)/* ResultSet rs =*/ {\r
- Result<ResultSetFuture> rs = updatePS.execAsync(trans, U_TEXT, data);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- } else {\r
- Result<ResultSet> rs = updatePS.exec(trans, U_TEXT, data);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- }\r
- \r
- wasModified(trans, CRUD.update, data);\r
- return Result.ok();\r
- }\r
-\r
- // This method Sig for Cached...\r
- public Result<Void> delete(TRANS trans, DATA data, boolean reread) {\r
- if(deletePS==null) {\r
- Result.err(Result.ERR_NotImplemented,"Delete is disabled for %s",getClass().getSimpleName());\r
- }\r
- // Since Deleting will be stored off, for possible re-constitution, need the whole thing\r
- if(reread) {\r
- Result<List<DATA>> rd = read(trans,data);\r
- if(rd.notOK()) {\r
- return Result.err(rd);\r
- }\r
- if(rd.isEmpty()) {\r
- return Result.err(Status.ERR_NotFound,"Not Found");\r
- }\r
- for(DATA d : rd.value) { \r
- if(async) {\r
- Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, d);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- } else {\r
- Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, d);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- }\r
- wasModified(trans, CRUD.delete, d);\r
- }\r
- } else {\r
- if(async)/* ResultSet rs =*/ {\r
- Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, data);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- } else {\r
- Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, data);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- }\r
- wasModified(trans, CRUD.delete, data);\r
- }\r
- return Result.ok();\r
- }\r
- \r
- public final Object[] keyFrom(DATA data) {\r
- return createPS.keyFrom(data);\r
- }\r
-\r
- @Override\r
- public String table() {\r
- return table;\r
- }\r
- \r
- public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency";\r
- public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency";\r
- protected static ConsistencyLevel readConsistency(AuthzTrans trans, String table) {\r
- String prop = trans.getProperty(CASS_READ_CONSISTENCY+'.'+table);\r
- if(prop==null) {\r
- prop = trans.getProperty(CASS_READ_CONSISTENCY);\r
- if(prop==null) {\r
- return ConsistencyLevel.ONE; // this is Cassandra Default\r
- }\r
- }\r
- return ConsistencyLevel.valueOf(prop);\r
- }\r
-\r
- protected static ConsistencyLevel writeConsistency(AuthzTrans trans, String table) {\r
- String prop = trans.getProperty(CASS_WRITE_CONSISTENCY+'.'+table);\r
- if(prop==null) {\r
- prop = trans.getProperty(CASS_WRITE_CONSISTENCY);\r
- if(prop==null) {\r
- return ConsistencyLevel.ONE; // this is Cassandra Default\\r
- }\r
- }\r
- return ConsistencyLevel.valueOf(prop);\r
- }\r
-\r
- public static DataInputStream toDIS(ByteBuffer bb) {\r
- byte[] b = bb.array();\r
- return new DataInputStream(\r
- new ByteArrayInputStream(b,bb.position(),bb.limit())\r
- );\r
- }\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import org.onap.aaf.authz.layer.Result;\r
-\r
-import org.onap.aaf.inno.env.Trans;\r
-\r
-\r
-/**\r
- * DataAccessObject Interface\r
- *\r
- * Extend the ReadOnly form (for Get), and add manipulation methods\r
- *\r
- * @param <DATA>\r
- */\r
-public interface DAO<TRANS extends Trans,DATA> extends DAO_RO<TRANS,DATA> {\r
- public Result<DATA> create(TRANS trans, DATA data);\r
- public Result<Void> update(TRANS trans, DATA data);\r
- // In many cases, the data has been correctly read first, so we shouldn't read again\r
- // Use reread=true if you are using DATA with only a Key\r
- public Result<Void> delete(TRANS trans, DATA data, boolean reread);\r
- public Object[] keyFrom(DATA data);\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-public class DAOException extends Exception {\r
-\r
- /**\r
- * \r
- */\r
- private static final long serialVersionUID = 1527904125585539823L;\r
-\r
-// // TODO - enum in result class == is our intended design, currently the DAO layer does not use Result<RV> so we still use these for now\r
-// public final static DAOException RoleNotFoundDAOException = new DAOException("RoleNotFound");\r
-// public final static DAOException PermissionNotFoundDAOException = new DAOException("PermissionNotFound");\r
-// public final static DAOException UserNotFoundDAOException = new DAOException("UserNotFound");\r
-\r
- public DAOException() {\r
- }\r
-\r
- public DAOException(String message) {\r
- super(message);\r
- }\r
-\r
- public DAOException(Throwable cause) {\r
- super(cause);\r
- }\r
-\r
- public DAOException(String message, Throwable cause) {\r
- super(message, cause);\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.layer.Result;\r
-\r
-import org.onap.aaf.inno.env.Trans;\r
-\r
-/**\r
- * DataAccessObject - ReadOnly\r
- * \r
- * It is useful to have a ReadOnly part of the interface for CachedDAO\r
- * \r
- * Normal DAOs will implement full DAO\r
- * \r
- *\r
- * @param <DATA>\r
- */\r
-public interface DAO_RO<TRANS extends Trans,DATA> {\r
- /**\r
- * Get a List of Data given Key of Object Array\r
- * @param objs\r
- * @return\r
- * @throws DAOException\r
- */\r
- public Result<List<DATA>> read(TRANS trans, Object ... key);\r
-\r
- /**\r
- * Get a List of Data given Key of DATA Object\r
- * @param trans\r
- * @param key\r
- * @return\r
- * @throws DAOException\r
- */\r
- public Result<List<DATA>> read(TRANS trans, DATA key);\r
-\r
- /**\r
- * close DAO\r
- */\r
- public void close(TRANS trans);\r
-\r
- /**\r
- * Return name of referenced Data\r
- * @return\r
- */\r
- public String table();\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.util.ArrayList;\r
-import java.util.Collection;\r
-import java.util.HashMap;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Map.Entry;\r
-import java.util.Set;\r
-\r
-import com.datastax.driver.core.Row;\r
-\r
-public abstract class Loader<DATA> {\r
- private int keylimit;\r
- public Loader(int keylimit) {\r
- this.keylimit = keylimit;\r
- }\r
- \r
- public int keylimit() {\r
- return keylimit;\r
- }\r
- \r
- protected abstract DATA load(DATA data, Row row);\r
- protected abstract void key(DATA data, int idx, Object[] obj);\r
- protected abstract void body(DATA data, int idx, Object[] obj);\r
-\r
- public final Object[] extract(DATA data, int size, CassDAOImpl.CRUD type) {\r
- Object[] rv=null;\r
- switch(type) {\r
- case delete:\r
- rv = new Object[keylimit()];\r
- key(data,0,rv);\r
- break;\r
- case update:\r
- rv = new Object[size];\r
- body(data,0,rv);\r
- int body = size-keylimit();\r
- if(body>0) {\r
- key(data,body,rv);\r
- }\r
- break;\r
- default:\r
- rv = new Object[size];\r
- key(data,0,rv);\r
- if(size>keylimit()) {\r
- body(data,keylimit(),rv);\r
- }\r
- break;\r
- }\r
- return rv;\r
- }\r
- \r
- public static void writeString(DataOutputStream os, String s) throws IOException {\r
- if(s==null) {\r
- os.writeInt(-1);\r
- } else {\r
- switch(s.length()) {\r
- case 0:\r
- os.writeInt(0);\r
- break;\r
- default:\r
- byte[] bytes = s.getBytes();\r
- os.writeInt(bytes.length);\r
- os.write(bytes);\r
- }\r
- }\r
- }\r
- \r
- /**\r
- * We use bytes here to set a Maximum\r
- * \r
- * @param is\r
- * @param MAX\r
- * @return\r
- * @throws IOException\r
- */\r
- public static String readString(DataInputStream is, byte[] _buff) throws IOException {\r
- int l = is.readInt();\r
- byte[] buff = _buff;\r
- switch(l) {\r
- case -1: return null;\r
- case 0: return "";\r
- default:\r
- // Cover case where there is a large string, without always allocating a large buffer.\r
- if(l>buff.length) {\r
- buff = new byte[l];\r
- }\r
- is.read(buff,0,l);\r
- return new String(buff,0,l);\r
- }\r
- }\r
-\r
- /**\r
- * Write a set with proper sizing\r
- * \r
- * Note: at the moment, this is just String. Probably can develop system where types\r
- * are supported too... but not now.\r
- * \r
- * @param os\r
- * @param set\r
- * @throws IOException\r
- */\r
- public static void writeStringSet(DataOutputStream os, Collection<String> set) throws IOException {\r
- if(set==null) {\r
- os.writeInt(-1);\r
- } else {\r
- os.writeInt(set.size());\r
- for(String s : set) {\r
- writeString(os, s);\r
- }\r
- }\r
-\r
- }\r
- \r
- public static Set<String> readStringSet(DataInputStream is, byte[] buff) throws IOException {\r
- int l = is.readInt();\r
- if(l<0) {\r
- return null;\r
- }\r
- Set<String> set = new HashSet<String>(l);\r
- for(int i=0;i<l;++i) {\r
- set.add(readString(is,buff));\r
- }\r
- return set;\r
- }\r
- \r
- public static List<String> readStringList(DataInputStream is, byte[] buff) throws IOException {\r
- int l = is.readInt();\r
- if(l<0) {\r
- return null;\r
- }\r
- List<String> list = new ArrayList<String>(l);\r
- for(int i=0;i<l;++i) {\r
- list.add(Loader.readString(is,buff));\r
- }\r
- return list;\r
- }\r
-\r
- /** \r
- * Write a map\r
- * @param os\r
- * @param map\r
- * @throws IOException\r
- */\r
- public static void writeStringMap(DataOutputStream os, Map<String,String> map) throws IOException {\r
- if(map==null) {\r
- os.writeInt(-1);\r
- } else {\r
- Set<Entry<String, String>> es = map.entrySet();\r
- os.writeInt(es.size());\r
- for(Entry<String,String> e : es) {\r
- writeString(os, e.getKey());\r
- writeString(os, e.getValue());\r
- }\r
- }\r
-\r
- }\r
-\r
- public static Map<String,String> readStringMap(DataInputStream is, byte[] buff) throws IOException {\r
- int l = is.readInt();\r
- if(l<0) {\r
- return null;\r
- }\r
- Map<String,String> map = new HashMap<String,String>(l);\r
- for(int i=0;i<l;++i) {\r
- String key = readString(is,buff);\r
- map.put(key,readString(is,buff));\r
- }\r
- return map;\r
- }\r
- public static void writeHeader(DataOutputStream os, int magic, int version) throws IOException {\r
- os.writeInt(magic);\r
- os.writeInt(version);\r
- }\r
- \r
- public static int readHeader(DataInputStream is, final int magic, final int version) throws IOException {\r
- if(is.readInt()!=magic) {\r
- throw new IOException("Corrupted Data Stream");\r
- }\r
- int v = is.readInt();\r
- if(version<0 || v>version) {\r
- throw new IOException("Unsupported Data Version: " + v);\r
- }\r
- return v;\r
- }\r
-\r
-}\r
-\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-\r
-public interface Streamer<DATA> {\r
- public abstract void marshal(DATA data, DataOutputStream os) throws IOException;\r
- public abstract void unmarshal(DATA data, DataInputStream is) throws IOException;\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-public interface Touchable {\r
- // Or make all DAOs accept list of CIDAOs...\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cached;\r
-\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.CachedDAO;\r
-import org.onap.aaf.dao.aaf.cass.CertDAO;\r
-\r
-public class CachedCertDAO extends CachedDAO<AuthzTrans, CertDAO, CertDAO.Data> {\r
- public CachedCertDAO(CertDAO dao, CIDAO<AuthzTrans> info) {\r
- super(dao, info, CertDAO.CACHE_SEG);\r
- }\r
- \r
- /**\r
- * Pass through Cert ID Lookup\r
- * \r
- * @param trans\r
- * @param ns\r
- * @return\r
- */\r
- \r
- public Result<List<CertDAO.Data>> readID(AuthzTrans trans, final String id) {\r
- return dao().readID(trans, id);\r
- }\r
- \r
- public Result<List<CertDAO.Data>> readX500(AuthzTrans trans, final String x500) {\r
- return dao().readX500(trans, x500);\r
- }\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cached;\r
-\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.CachedDAO;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-\r
-public class CachedCredDAO extends CachedDAO<AuthzTrans, CredDAO, CredDAO.Data> {\r
- public CachedCredDAO(CredDAO dao, CIDAO<AuthzTrans> info) {\r
- super(dao, info, CredDAO.CACHE_SEG);\r
- }\r
- \r
- /**\r
- * Pass through Cred Lookup\r
- * \r
- * Unlike Role and Perm, we don't need or want to cache these elements... Only used for NS Delete.\r
- * \r
- * @param trans\r
- * @param ns\r
- * @return\r
- */\r
- public Result<List<CredDAO.Data>> readNS(AuthzTrans trans, final String ns) {\r
- \r
- return dao().readNS(trans, ns);\r
- }\r
- \r
- public Result<List<CredDAO.Data>> readID(AuthzTrans trans, final String id) {\r
- DAOGetter getter = new DAOGetter(trans,dao()) {\r
- public Result<List<CredDAO.Data>> call() {\r
- return dao().readID(trans, id);\r
- }\r
- };\r
- \r
- Result<List<CredDAO.Data>> lurd = get(trans, id, getter);\r
- if(lurd.isOK() && lurd.isEmpty()) {\r
- return Result.err(Status.ERR_UserNotFound,"No User Cred found");\r
- }\r
- return lurd;\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cached;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.CachedDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO;\r
-\r
-public class CachedNSDAO extends CachedDAO<AuthzTrans, NsDAO, NsDAO.Data> {\r
- public CachedNSDAO(NsDAO dao, CIDAO<AuthzTrans> info) {\r
- super(dao, info, NsDAO.CACHE_SEG);\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cached;\r
-\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.CachedDAO;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO.Data;\r
-\r
-public class CachedPermDAO extends CachedDAO<AuthzTrans,PermDAO, PermDAO.Data> {\r
-\r
- public CachedPermDAO(PermDAO dao, CIDAO<AuthzTrans> info) {\r
- super(dao, info, PermDAO.CACHE_SEG);\r
- }\r
-\r
- public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {\r
- DAOGetter getter = new DAOGetter(trans,dao()) {\r
- public Result<List<Data>> call() {\r
- return dao.readNS(trans, ns);\r
- }\r
- };\r
- \r
- Result<List<Data>> lurd = get(trans, ns, getter);\r
- if(lurd.isOKhasData()) {\r
- return lurd;\r
- } else {\r
- \r
- }\r
-// if(getter.result==null) {\r
-// if(lurd==null) {\r
- return Result.err(Status.ERR_PermissionNotFound,"No Permission found - " + lurd.details);\r
-// } else {\r
-// return Result.ok(lurd);\r
-// }\r
-// }\r
-// return getter.result;\r
- }\r
-\r
- public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String type) {\r
- return dao().readChildren(trans,ns,type);\r
- }\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param type\r
- * @return\r
- */\r
- public Result<List<Data>> readByType(AuthzTrans trans, final String ns, final String type) {\r
- DAOGetter getter = new DAOGetter(trans,dao()) {\r
- public Result<List<Data>> call() {\r
- return dao.readByType(trans, ns, type);\r
- }\r
- };\r
- \r
- // Note: Can reuse index1 here, because there is no name collision versus response\r
- Result<List<Data>> lurd = get(trans, ns+'|'+type, getter);\r
- if(lurd.isOK() && lurd.isEmpty()) {\r
- return Result.err(Status.ERR_PermissionNotFound,"No Permission found");\r
- }\r
- return lurd;\r
- }\r
- \r
- /**\r
- * Add desciption to this permission\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param type\r
- * @param instance\r
- * @param action\r
- * @param description\r
- * @return\r
- */\r
- public Result<Void> addDescription(AuthzTrans trans, String ns, String type, \r
- String instance, String action, String description) {\r
- //TODO Invalidate?\r
- return dao().addDescription(trans, ns, type, instance, action, description);\r
- }\r
- \r
- public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, RoleDAO.Data role) {\r
- Result<Void> rv = dao().addRole(trans,perm,role.encode());\r
- if(trans.debug().isLoggable())\r
- trans.debug().log("Adding",role.encode(),"to", perm, "with CachedPermDAO.addRole");\r
- invalidate(trans,perm);\r
- return rv;\r
- }\r
-\r
- public Result<Void> delRole(AuthzTrans trans, Data perm, RoleDAO.Data role) {\r
- Result<Void> rv = dao().delRole(trans,perm,role.encode());\r
- if(trans.debug().isLoggable())\r
- trans.debug().log("Removing",role.encode(),"from", perm, "with CachedPermDAO.delRole");\r
- invalidate(trans,perm);\r
- return rv;\r
- }\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cached;\r
-\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.CachedDAO;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO.Data;\r
-\r
-public class CachedRoleDAO extends CachedDAO<AuthzTrans,RoleDAO, RoleDAO.Data> {\r
- public CachedRoleDAO(RoleDAO dao, CIDAO<AuthzTrans> info) {\r
- super(dao, info, RoleDAO.CACHE_SEG);\r
- }\r
-\r
- public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {\r
- DAOGetter getter = new DAOGetter(trans,dao()) {\r
- public Result<List<Data>> call() {\r
- return dao.readNS(trans, ns);\r
- }\r
- };\r
- \r
- Result<List<Data>> lurd = get(trans, ns, getter);\r
- if(lurd.isOK() && lurd.isEmpty()) {\r
- return Result.err(Status.ERR_RoleNotFound,"No Role found");\r
- }\r
- return lurd;\r
- }\r
-\r
- public Result<List<Data>> readName(AuthzTrans trans, final String name) {\r
- DAOGetter getter = new DAOGetter(trans,dao()) {\r
- public Result<List<Data>> call() {\r
- return dao().readName(trans, name);\r
- }\r
- };\r
- \r
- Result<List<Data>> lurd = get(trans, name, getter);\r
- if(lurd.isOK() && lurd.isEmpty()) {\r
- return Result.err(Status.ERR_RoleNotFound,"No Role found");\r
- }\r
- return lurd;\r
- }\r
-\r
- public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String name) {\r
- // At this point, I'm thinking it's better not to try to cache "*" results\r
- // Data probably won't be accurate, and adding it makes every update invalidate most of the cache\r
- // 2/4/2014\r
- return dao().readChildren(trans,ns,name);\r
- }\r
-\r
- public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {\r
- Result<Void> rv = dao().addPerm(trans,rd,perm);\r
- if(trans.debug().isLoggable())\r
- trans.debug().log("Adding",perm,"to", rd, "with CachedRoleDAO.addPerm");\r
- invalidate(trans, rd);\r
- return rv;\r
- }\r
-\r
- public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {\r
- Result<Void> rv = dao().delPerm(trans,rd,perm);\r
- if(trans.debug().isLoggable())\r
- trans.debug().log("Removing",perm,"from", rd, "with CachedRoleDAO.addPerm");\r
- invalidate(trans, rd);\r
- return rv;\r
- }\r
- \r
- /**\r
- * Add description to this role\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param name\r
- * @param description\r
- * @return\r
- */\r
- public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {\r
- //TODO Invalidate?\r
- return dao().addDescription(trans, ns, name, description);\r
-\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cached;\r
-\r
-import java.util.ArrayList;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.CachedDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO.Data;\r
-\r
-import org.onap.aaf.inno.env.Slot;\r
-\r
-public class CachedUserRoleDAO extends CachedDAO<AuthzTrans,UserRoleDAO, UserRoleDAO.Data> {\r
- private Slot transURSlot;\r
-\r
- public CachedUserRoleDAO(UserRoleDAO dao, CIDAO<AuthzTrans> info) {\r
- super(dao, info, UserRoleDAO.CACHE_SEG);\r
- transURSlot = dao.transURSlot;\r
- }\r
-\r
- /**\r
- * Special Case. \r
- * User Roles by User are very likely to be called many times in a Transaction, to validate "May User do..."\r
- * Pull result, and make accessible by the Trans, which is always keyed by User.\r
- * @param trans\r
- * @param user\r
- * @return\r
- */\r
- public Result<List<Data>> readByUser(AuthzTrans trans, final String user) {\r
- DAOGetter getter = new DAOGetter(trans,dao()) {\r
- public Result<List<Data>> call() {\r
- // If the call is for THIS user, and it exists, get from TRANS, add to TRANS if not.\r
- if(user!=null && user.equals(trans.user())) {\r
- Result<List<Data>> transLD = trans.get(transURSlot,null);\r
- if(transLD==null ) {\r
- transLD = dao.readByUser(trans, user);\r
- }\r
- return transLD;\r
- } else {\r
- return dao.readByUser(trans, user);\r
- }\r
- }\r
- };\r
- Result<List<Data>> lurd = get(trans, user, getter);\r
- if(lurd.isOK() && lurd.isEmpty()) {\r
- return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",user);\r
- }\r
- return lurd;\r
- }\r
-\r
- \r
- public Result<List<Data>> readByRole(AuthzTrans trans, final String role) {\r
- DAOGetter getter = new DAOGetter(trans,dao()) {\r
- public Result<List<Data>> call() {\r
- return dao.readByRole(trans, role);\r
- }\r
- };\r
- Result<List<Data>> lurd = get(trans, role, getter);\r
- if(lurd.isOK() && lurd.isEmpty()) {\r
- return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",role);\r
- }\r
- return lurd;\r
- }\r
-\r
- public Result<List<UserRoleDAO.Data>> readUserInRole(final AuthzTrans trans, final String user, final String role) {\r
- DAOGetter getter = new DAOGetter(trans,dao()) {\r
- public Result<List<Data>> call() {\r
- if(user.equals(trans.user())) {\r
- Result<List<Data>> rrbu = readByUser(trans, user);\r
- if(rrbu.isOK()) {\r
- List<Data> ld = new ArrayList<Data>(1);\r
- for(Data d : rrbu.value) {\r
- if(d.role.equals(role)) {\r
- ld.add(d);\r
- break;\r
- }\r
- }\r
- return Result.ok(ld).emptyList(ld.isEmpty());\r
- } else {\r
- return rrbu;\r
- }\r
- }\r
- return dao.readByUserRole(trans, user, role);\r
- }\r
- };\r
- Result<List<Data>> lurd = get(trans, keyFromObjs(user,role), getter);\r
- if(lurd.isOK() && lurd.isEmpty()) {\r
- return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for role [%s] and user [%s]",role,user);\r
- }\r
- return lurd;\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.util.Date;\r
-import java.util.List;\r
-import java.util.UUID;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Row;\r
-\r
-\r
-public class ApprovalDAO extends CassDAOImpl<AuthzTrans,ApprovalDAO.Data> {\r
- public static final String PENDING = "pending";\r
- public static final String DENIED = "denied";\r
- public static final String APPROVED = "approved";\r
- \r
- private static final String TABLE = "approval";\r
- private HistoryDAO historyDAO;\r
- private PSInfo psByUser, psByApprover, psByTicket, psByStatus;\r
-\r
- \r
- public ApprovalDAO(AuthzTrans trans, Cluster cluster, String keyspace) {\r
- super(trans, ApprovalDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO = new HistoryDAO(trans, this);\r
- init(trans);\r
- }\r
-\r
-\r
- public ApprovalDAO(AuthzTrans trans, HistoryDAO hDAO) {\r
- super(trans, ApprovalDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO=hDAO;\r
- init(trans);\r
- }\r
-\r
- private static final int KEYLIMIT = 1;\r
- public static class Data {\r
- public UUID id;\r
- public UUID ticket;\r
- public String user;\r
- public String approver;\r
- public String type;\r
- public String status;\r
- public String memo;\r
- public String operation;\r
- public Date updated;\r
- }\r
- \r
- private static class ApprovalLoader extends Loader<Data> {\r
- public static final ApprovalLoader deflt = new ApprovalLoader(KEYLIMIT);\r
- \r
- public ApprovalLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
- \r
- @Override\r
- public Data load(Data data, Row row) {\r
- data.id = row.getUUID(0);\r
- data.ticket = row.getUUID(1);\r
- data.user = row.getString(2);\r
- data.approver = row.getString(3);\r
- data.type = row.getString(4);\r
- data.status = row.getString(5);\r
- data.memo = row.getString(6);\r
- data.operation = row.getString(7);\r
- if(row.getColumnDefinitions().size()>8) {\r
- // Rows reported in MicroSeconds\r
- data.updated = new Date(row.getLong(8)/1000);\r
- }\r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.id;\r
- }\r
-\r
- @Override\r
- protected void body(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
- obj[idx]=data.ticket;\r
- obj[++idx]=data.user;\r
- obj[++idx]=data.approver;\r
- obj[++idx]=data.type;\r
- obj[++idx]=data.status;\r
- obj[++idx]=data.memo;\r
- obj[++idx]=data.operation;\r
- }\r
- } \r
- \r
- private void init(AuthzTrans trans) {\r
- String[] helpers = setCRUD(trans, TABLE, Data.class, ApprovalLoader.deflt,8);\r
- // Need a specialty Creator to handle the "now()"\r
- replace(CRUD.create, new PSInfo(trans, "INSERT INTO " + TABLE + " (" + helpers[FIELD_COMMAS] +\r
- ") VALUES(now(),?,?,?,?,?,?,?)",new ApprovalLoader(0) {\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- // Overridden because key is the "now()"\r
- }\r
- },writeConsistency)\r
- );\r
-\r
- psByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + \r
- " WHERE user = ?", new ApprovalLoader(1) {\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.user;\r
- }\r
- }, readConsistency);\r
- \r
- psByApprover = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + \r
- " WHERE approver = ?", new ApprovalLoader(1) {\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.approver;\r
- }\r
- }, readConsistency);\r
-\r
- psByTicket = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + \r
- " WHERE ticket = ?", new ApprovalLoader(1) {\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.ticket;\r
- }\r
- }, readConsistency);\r
-\r
- psByStatus = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + \r
- " WHERE status = ?", new ApprovalLoader(1) {\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.status;\r
- }\r
- }, readConsistency);\r
-\r
-\r
- }\r
- \r
- public Result<List<ApprovalDAO.Data>> readByUser(AuthzTrans trans, String user) {\r
- return psByUser.read(trans, R_TEXT, new Object[]{user});\r
- }\r
-\r
- public Result<List<ApprovalDAO.Data>> readByApprover(AuthzTrans trans, String approver) {\r
- return psByApprover.read(trans, R_TEXT, new Object[]{approver});\r
- }\r
-\r
- public Result<List<ApprovalDAO.Data>> readByTicket(AuthzTrans trans, UUID ticket) {\r
- return psByTicket.read(trans, R_TEXT, new Object[]{ticket});\r
- }\r
-\r
- public Result<List<ApprovalDAO.Data>> readByStatus(AuthzTrans trans, String status) {\r
- return psByStatus.read(trans, R_TEXT, new Object[]{status});\r
- } \r
-\r
- /**\r
- * Log Modification statements to History\r
- *\r
- * @param modified which CRUD action was done\r
- * @param data entity data that needs a log entry\r
- * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data\r
- */\r
- @Override\r
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {\r
- boolean memo = override.length>0 && override[0]!=null;\r
- boolean subject = override.length>1 && override[1]!=null;\r
-\r
- HistoryDAO.Data hd = HistoryDAO.newInitedData();\r
- hd.user = trans.user();\r
- hd.action = modified.name();\r
- hd.target = TABLE;\r
- hd.subject = subject?override[1]:data.user + "|" + data.approver;\r
- hd.memo = memo\r
- ? String.format("%s by %s", override[0], hd.user)\r
- : (modified.name() + "d approval for " + data.user);\r
- // Detail?\r
- // Reconstruct?\r
- if(historyDAO.create(trans, hd).status!=Status.OK) {\r
- trans.error().log("Cannot log to History");\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.ByteArrayOutputStream;\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.Date;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Set;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-import org.onap.aaf.dao.Streamer;\r
-\r
-import org.onap.aaf.inno.env.util.Chrono;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Row;\r
-\r
-/**\r
- * CredDAO manages credentials. \r
- * Date: 7/19/13\r
- */\r
-public class ArtiDAO extends CassDAOImpl<AuthzTrans,ArtiDAO.Data> {\r
- public static final String TABLE = "artifact";\r
- \r
- private HistoryDAO historyDAO;\r
- private PSInfo psByMechID,psByMachine;\r
- \r
- public ArtiDAO(AuthzTrans trans, Cluster cluster, String keyspace) {\r
- super(trans, ArtiDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- init(trans);\r
- }\r
-\r
- public ArtiDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) {\r
- super(trans, ArtiDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO = hDao;\r
- init(trans);\r
- }\r
-\r
- public static final int KEYLIMIT = 2;\r
- public static class Data implements Bytification {\r
- public String mechid;\r
- public String machine;\r
- private Set<String> type;\r
- public String sponsor;\r
- public String ca;\r
- public String dir;\r
- public String appName;\r
- public String os_user;\r
- public String notify;\r
- public Date expires;\r
- public int renewDays;\r
- \r
-// // Getters\r
- public Set<String> type(boolean mutable) {\r
- if (type == null) {\r
- type = new HashSet<String>();\r
- } else if (mutable && !(type instanceof HashSet)) {\r
- type = new HashSet<String>(type);\r
- }\r
- return type;\r
- }\r
-\r
-\r
- @Override\r
- public ByteBuffer bytify() throws IOException {\r
- ByteArrayOutputStream baos = new ByteArrayOutputStream();\r
- ArtifactLoader.deflt.marshal(this,new DataOutputStream(baos));\r
- return ByteBuffer.wrap(baos.toByteArray());\r
- }\r
- \r
- @Override\r
- public void reconstitute(ByteBuffer bb) throws IOException {\r
- ArtifactLoader.deflt.unmarshal(this, toDIS(bb));\r
- }\r
-\r
- public String toString() {\r
- return mechid + ' ' + machine + ' ' + Chrono.dateTime(expires);\r
- }\r
- }\r
-\r
- private static class ArtifactLoader extends Loader<Data> implements Streamer<Data>{\r
- public static final int MAGIC=95829343;\r
- public static final int VERSION=1;\r
- public static final int BUFF_SIZE=48; // Note: \r
-\r
- public static final ArtifactLoader deflt = new ArtifactLoader(KEYLIMIT);\r
- public ArtifactLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
-\r
- @Override\r
- public Data load(Data data, Row row) {\r
- data.mechid = row.getString(0);\r
- data.machine = row.getString(1);\r
- data.type = row.getSet(2, String.class);\r
- data.sponsor = row.getString(3);\r
- data.ca = row.getString(4);\r
- data.dir = row.getString(5);\r
- data.appName = row.getString(6);\r
- data.os_user = row.getString(7);\r
- data.notify = row.getString(8);\r
- data.expires = row.getDate(9);\r
- data.renewDays = row.getInt(10);\r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(final Data data, final int idx, Object[] obj) {\r
- int i;\r
- obj[i=idx] = data.mechid;\r
- obj[++i] = data.machine;\r
- }\r
-\r
- @Override\r
- protected void body(final Data data, final int idx, Object[] obj) {\r
- int i;\r
- obj[i=idx] = data.type;\r
- obj[++i] = data.sponsor;\r
- obj[++i] = data.ca;\r
- obj[++i] = data.dir;\r
- obj[++i] = data.appName;\r
- obj[++i] = data.os_user;\r
- obj[++i] = data.notify;\r
- obj[++i] = data.expires;\r
- obj[++i] = data.renewDays;\r
- }\r
-\r
- @Override\r
- public void marshal(Data data, DataOutputStream os) throws IOException {\r
- writeHeader(os,MAGIC,VERSION);\r
- writeString(os, data.mechid);\r
- writeString(os, data.machine);\r
- os.writeInt(data.type.size());\r
- for(String s : data.type) {\r
- writeString(os, s);\r
- }\r
- writeString(os, data.sponsor);\r
- writeString(os, data.ca);\r
- writeString(os, data.dir);\r
- writeString(os, data.appName);\r
- writeString(os, data.os_user);\r
- writeString(os, data.notify);\r
- os.writeLong(data.expires==null?-1:data.expires.getTime());\r
- os.writeInt(data.renewDays);\r
- }\r
-\r
- @Override\r
- public void unmarshal(Data data, DataInputStream is) throws IOException {\r
- /*int version = */readHeader(is,MAGIC,VERSION);\r
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields\r
- byte[] buff = new byte[BUFF_SIZE];\r
- data.mechid = readString(is,buff);\r
- data.machine = readString(is,buff);\r
- int size = is.readInt();\r
- data.type = new HashSet<String>(size);\r
- for(int i=0;i<size;++i) {\r
- data.type.add(readString(is,buff));\r
- }\r
- data.sponsor = readString(is,buff);\r
- data.ca = readString(is,buff);\r
- data.dir = readString(is,buff);\r
- data.appName = readString(is,buff);\r
- data.os_user = readString(is,buff);\r
- data.notify = readString(is,buff);\r
- long l = is.readLong();\r
- data.expires = l<0?null:new Date(l);\r
- data.renewDays = is.readInt();\r
- }\r
- }\r
-\r
- private void init(AuthzTrans trans) {\r
- // Set up sub-DAOs\r
- if(historyDAO==null) {\r
- historyDAO = new HistoryDAO(trans,this);\r
- }\r
- \r
- String[] helpers = setCRUD(trans, TABLE, Data.class, ArtifactLoader.deflt);\r
-\r
- psByMechID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + \r
- " WHERE mechid = ?", new ArtifactLoader(1) {\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.type;\r
- }\r
- },readConsistency);\r
-\r
- psByMachine = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + \r
- " WHERE machine = ?", new ArtifactLoader(1) {\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.type;\r
- }\r
- },readConsistency);\r
-\r
- }\r
- \r
- \r
- public Result<List<Data>> readByMechID(AuthzTrans trans, String mechid) {\r
- return psByMechID.read(trans, R_TEXT, new Object[]{mechid});\r
- }\r
-\r
- public Result<List<ArtiDAO.Data>> readByMachine(AuthzTrans trans, String machine) {\r
- return psByMachine.read(trans, R_TEXT, new Object[]{machine});\r
- }\r
-\r
- /**\r
- * Log Modification statements to History\r
- *\r
- * @param modified which CRUD action was done\r
- * @param data entity data that needs a log entry\r
- * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data\r
- */\r
- @Override\r
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {\r
- boolean memo = override.length>0 && override[0]!=null;\r
- boolean subject = override.length>1 && override[1]!=null;\r
-\r
- HistoryDAO.Data hd = HistoryDAO.newInitedData();\r
- hd.user = trans.user();\r
- hd.action = modified.name();\r
- hd.target = TABLE;\r
- hd.subject = subject?override[1]: data.mechid;\r
- hd.memo = memo\r
- ? String.format("%s by %s", override[0], hd.user)\r
- : String.format("%sd %s for %s",modified.name(),data.mechid,data.machine);\r
- // Detail?\r
- if(modified==CRUD.delete) {\r
- try {\r
- hd.reconstruct = data.bytify();\r
- } catch (IOException e) {\r
- trans.error().log(e,"Could not serialize CredDAO.Data");\r
- }\r
- }\r
-\r
- if(historyDAO.create(trans, hd).status!=Status.OK) {\r
- trans.error().log("Cannot log to History");\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.IOException;\r
-import java.net.HttpURLConnection;\r
-import java.net.URI;\r
-import java.util.Date;\r
-import java.util.HashMap;\r
-import java.util.HashSet;\r
-import java.util.Map;\r
-import java.util.Map.Entry;\r
-import java.util.concurrent.BlockingQueue;\r
-import java.util.concurrent.ConcurrentHashMap;\r
-import java.util.concurrent.LinkedBlockingQueue;\r
-import java.util.concurrent.TimeUnit;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.AbsCassDAO;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.CassAccess;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-\r
-import org.onap.aaf.cadi.CadiException;\r
-import org.onap.aaf.cadi.SecuritySetter;\r
-import org.onap.aaf.cadi.client.Future;\r
-import org.onap.aaf.cadi.client.Rcli;\r
-import org.onap.aaf.cadi.client.Retryable;\r
-import org.onap.aaf.cadi.http.HMangr;\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-import org.onap.aaf.inno.env.Trans;\r
-import com.datastax.driver.core.BoundStatement;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.ResultSet;\r
-import com.datastax.driver.core.Row;\r
-import com.datastax.driver.core.exceptions.DriverException;\r
-\r
-public class CacheInfoDAO extends CassDAOImpl<AuthzTrans,CacheInfoDAO.Data> implements CIDAO<AuthzTrans> {\r
-\r
- private static final String TABLE = "cache";\r
- public static final Map<String,Date[]> info = new ConcurrentHashMap<String,Date[]>();\r
-\r
- private static CacheUpdate cacheUpdate;\r
- \r
- \r
- private BoundStatement check;\r
- // Hold current time stamps from Tables\r
- private final Date startTime;\r
- \r
- public CacheInfoDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {\r
- super(trans, CacheInfoDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- startTime = new Date();\r
- init(trans);\r
- }\r
-\r
- public CacheInfoDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) throws APIException, IOException {\r
- super(trans, CacheInfoDAO.class.getSimpleName(),aDao,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- startTime = new Date();\r
- init(trans);\r
- }\r
-\r
-\r
- //////////////////////////////////////////\r
- // Data Definition, matches Cassandra DM\r
- //////////////////////////////////////////\r
- private static final int KEYLIMIT = 2;\r
- /**\r
- */\r
- public static class Data {\r
- public Data() {\r
- name = null;\r
- touched = null;\r
- }\r
- public Data(String name, int seg) {\r
- this.name = name;\r
- this.seg = seg;\r
- touched = null;\r
- }\r
- \r
- public String name;\r
- public int seg;\r
- public Date touched;\r
- }\r
-\r
- private static class InfoLoader extends Loader<Data> {\r
- public static final InfoLoader dflt = new InfoLoader(KEYLIMIT);\r
- \r
- public InfoLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
- \r
- @Override\r
- public Data load(Data data, Row row) {\r
- // Int more efficient\r
- data.name = row.getString(0);\r
- data.seg = row.getInt(1);\r
- data.touched = row.getDate(2);\r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
-\r
- obj[idx]=data.name;\r
- obj[++idx]=data.seg;\r
- }\r
-\r
- @Override\r
- protected void body(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.touched;\r
- }\r
- }\r
- \r
- public static<T extends Trans> void startUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {\r
- if(cacheUpdate==null) {\r
- Thread t= new Thread(cacheUpdate = new CacheUpdate(env,hman,ss, ip,port),"CacheInfo Update Thread");\r
- t.setDaemon(true);\r
- t.start();\r
- }\r
- }\r
-\r
- public static<T extends Trans> void stopUpdate() {\r
- if(cacheUpdate!=null) {\r
- cacheUpdate.go=false;\r
- }\r
- }\r
-\r
- private final static class CacheUpdate extends Thread {\r
- public static BlockingQueue<Transfer> notifyDQ = new LinkedBlockingQueue<Transfer>(2000);\r
-\r
- private static final String VOID_CT="application/Void+json;q=1.0;charset=utf-8;version=2.0,application/json;q=1.0;version=2.0,*/*;q=1.0";\r
- private AuthzEnv env;\r
- private HMangr hman;\r
- private SecuritySetter<HttpURLConnection> ss;\r
- private final String authority;\r
- public boolean go = true;\r
- \r
- public CacheUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {\r
- this.env = env;\r
- this.hman = hman;\r
- this.ss = ss;\r
- \r
- this.authority = ip+':'+port;\r
- }\r
- \r
- private static class Transfer {\r
- public String table;\r
- public int segs[];\r
- public Transfer(String table, int[] segs) {\r
- this.table = table;\r
- this.segs = segs;\r
- }\r
- }\r
- private class CacheClear extends Retryable<Integer> {\r
- public int total=0;\r
- private AuthzTrans trans;\r
- private String type;\r
- private String segs;\r
- \r
- public CacheClear(AuthzTrans trans) {\r
- this.trans = trans;\r
- }\r
-\r
- public void set(Entry<String, IntHolder> es) {\r
- type = es.getKey();\r
- segs = es.getValue().toString();\r
- }\r
- \r
- @Override\r
- public Integer code(Rcli<?> client) throws APIException, CadiException {\r
- URI to = client.getURI();\r
- if(!to.getAuthority().equals(authority)) {\r
- Future<Void> f = client.delete("/mgmt/cache/"+type+'/'+segs,VOID_CT);\r
- if(f.get(hman.readTimeout())) {\r
- ++total;\r
- } else {\r
- trans.error().log("Error During AAF Peer Notify",f.code(),f.body());\r
- }\r
- }\r
- return total;\r
- }\r
- }\r
- \r
- private class IntHolder {\r
- private int[] raw;\r
- HashSet<Integer> set;\r
- \r
- public IntHolder(int ints[]) {\r
- raw = ints;\r
- set = null;\r
- }\r
- public void add(int[] ints) {\r
- if(set==null) {\r
- set = new HashSet<Integer>();\r
- \r
- for(int i=0;i<raw.length;++i) {\r
- set.add(raw[i]);\r
- }\r
- }\r
- for(int i=0;i<ints.length;++i) {\r
- set.add(ints[i]);\r
- }\r
- }\r
-\r
- @Override\r
- public String toString() {\r
- StringBuilder sb = new StringBuilder();\r
- boolean first = true;\r
- if(set==null) {\r
- for(int i : raw) {\r
- if(first) {\r
- first=false;\r
- } else {\r
- sb.append(',');\r
- }\r
- sb.append(i);\r
- }\r
- } else {\r
- for(Integer i : set) {\r
- if(first) {\r
- first=false;\r
- } else {\r
- sb.append(',');\r
- }\r
- sb.append(i);\r
- }\r
- }\r
- return sb.toString();\r
- }\r
- }\r
- \r
- @Override\r
- public void run() {\r
- do {\r
- try {\r
- Transfer data = notifyDQ.poll(4,TimeUnit.SECONDS);\r
- if(data==null) {\r
- continue;\r
- }\r
- \r
- int count = 0;\r
- CacheClear cc = null;\r
- Map<String,IntHolder> gather = null;\r
- AuthzTrans trans = null;\r
- long start=0;\r
- // Do a block poll first\r
- do {\r
- if(gather==null) {\r
- start = System.nanoTime();\r
- trans = env.newTransNoAvg();\r
- cc = new CacheClear(trans);\r
- gather = new HashMap<String,IntHolder>();\r
- }\r
- IntHolder prev = gather.get(data.table);\r
- if(prev==null) {\r
- gather.put(data.table,new IntHolder(data.segs));\r
- } else {\r
- prev.add(data.segs);\r
- }\r
- // continue while there is data\r
- } while((data = notifyDQ.poll())!=null);\r
- if(gather!=null) {\r
- for(Entry<String, IntHolder> es : gather.entrySet()) {\r
- cc.set(es);\r
- try {\r
- if(hman.all(ss, cc, false)!=null) {\r
- ++count;\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e, "Error on Cache Update");\r
- }\r
- }\r
- if(env.debug().isLoggable()) {\r
- float millis = (System.nanoTime()-start)/1000000f;\r
- StringBuilder sb = new StringBuilder("Direct Cache Refresh: ");\r
- sb.append("Updated ");\r
- sb.append(count);\r
- if(count==1) {\r
- sb.append(" entry for ");\r
- } else { \r
- sb.append(" entries for ");\r
- }\r
- int peers = count<=0?0:cc.total/count;\r
- sb.append(peers);\r
- sb.append(" client");\r
- if(peers!=1) {\r
- sb.append('s');\r
- }\r
- sb.append(" in ");\r
- sb.append(millis);\r
- sb.append("ms");\r
- trans.auditTrail(0, sb, Env.REMOTE);\r
- env.debug().log(sb);\r
- }\r
- }\r
- } catch (InterruptedException e1) {\r
- go = false;\r
- }\r
- } while(go);\r
- }\r
- }\r
-\r
- private void init(AuthzTrans trans) throws APIException, IOException {\r
- \r
- String[] helpers = setCRUD(trans, TABLE, Data.class, InfoLoader.dflt);\r
- check = getSession(trans).prepare(SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE).bind();\r
-\r
- disable(CRUD.create);\r
- disable(CRUD.delete);\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.dao.aaf.cass.CIDAO#touch(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, int)\r
- */\r
- \r
- @Override\r
- public Result<Void> touch(AuthzTrans trans, String name, int ... seg) {\r
- /////////////\r
- // Direct Service Cache Invalidation\r
- /////////////\r
- // ConcurrentQueues are open-ended. We don't want any Memory leaks \r
- // Note: we keep a separate counter, because "size()" on a Linked Queue is expensive\r
- if(cacheUpdate!=null) {\r
- try {\r
- if(!CacheUpdate.notifyDQ.offer(new CacheUpdate.Transfer(name, seg),2,TimeUnit.SECONDS)) {\r
- trans.error().log("Cache Notify Queue is not accepting messages, bouncing may be appropriate" );\r
- }\r
- } catch (InterruptedException e) {\r
- trans.error().log("Cache Notify Queue posting was interrupted" );\r
- }\r
- }\r
-\r
- /////////////\r
- // Table Based Cache Invalidation (original)\r
- /////////////\r
- // Note: Save time with multiple Sequence Touches, but PreparedStmt doesn't support IN\r
- StringBuilder start = new StringBuilder("CacheInfoDAO Touch segments ");\r
- start.append(name);\r
- start.append(": ");\r
- StringBuilder sb = new StringBuilder("BEGIN BATCH\n");\r
- boolean first = true;\r
- for(int s : seg) {\r
- sb.append(UPDATE_SP);\r
- sb.append(TABLE);\r
- sb.append(" SET touched=dateof(now()) WHERE name = '");\r
- sb.append(name);\r
- sb.append("' AND seg = ");\r
- sb.append(s);\r
- sb.append(";\n"); \r
- if(first) {\r
- first =false;\r
- } else {\r
- start.append(',');\r
- }\r
- start.append(s);\r
- }\r
- sb.append("APPLY BATCH;");\r
- TimeTaken tt = trans.start(start.toString(),Env.REMOTE);\r
- try {\r
- getSession(trans).executeAsync(sb.toString());\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- } finally {\r
- tt.done();\r
- }\r
- return Result.ok();\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.dao.aaf.cass.CIDAO#check(org.onap.aaf.authz.env.AuthzTrans)\r
- */\r
- @Override\r
- public Result<Void> check(AuthzTrans trans) {\r
- ResultSet rs;\r
- TimeTaken tt = trans.start("Check Table Timestamps",Env.REMOTE);\r
- try {\r
- rs = getSession(trans).execute(check);\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- String lastName = null;\r
- Date[] dates = null;\r
- for(Row row : rs.all()) {\r
- String name = row.getString(0);\r
- int seg = row.getInt(1);\r
- if(!name.equals(lastName)) {\r
- dates = info.get(name);\r
- lastName=name;\r
- }\r
- if(dates==null) {\r
- dates=new Date[seg+1];\r
- info.put(name,dates);\r
- } else if(dates.length<=seg) {\r
- Date[] temp = new Date[seg+1];\r
- System.arraycopy(dates, 0, temp, 0, dates.length);\r
- dates = temp;\r
- info.put(name, dates);\r
- }\r
- Date temp = row.getDate(2);\r
- if(dates[seg]==null || dates[seg].before(temp)) {\r
- dates[seg]=temp;\r
- }\r
- }\r
- return Result.ok();\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.dao.aaf.cass.CIDAO#get(java.lang.String, int)\r
- */\r
- @Override\r
- public Date get(AuthzTrans trans, String table, int seg) {\r
- Date[] dates = info.get(table);\r
- if(dates==null) {\r
- dates = new Date[seg+1];\r
- touch(trans,table, seg);\r
- } else if(dates.length<=seg) {\r
- Date[] temp = new Date[seg+1];\r
- System.arraycopy(dates, 0, temp, 0, dates.length);\r
- dates = temp;\r
- }\r
- Date rv = dates[seg];\r
- if(rv==null) {\r
- rv=dates[seg]=startTime;\r
- }\r
- return rv;\r
- }\r
-\r
- @Override\r
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {\r
- // Do nothing\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import org.onap.aaf.dao.Cacheable;\r
-import org.onap.aaf.dao.Cached;\r
-import org.onap.aaf.dao.CachedDAO;\r
-\r
-public abstract class CacheableData implements Cacheable {\r
- // WARNING: DON'T attempt to add any members here, as it will \r
- // be treated by system as fields expected in Tables\r
- protected int seg(Cached<?,?> cache, Object ... fields) {\r
- return cache==null?0:cache.invalidate(CachedDAO.keyFromObjs(fields));\r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.ByteArrayOutputStream;\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.math.BigInteger;\r
-import java.nio.ByteBuffer;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.Cached;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-import org.onap.aaf.dao.Streamer;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Row;\r
-\r
-/**\r
- * CredDAO manages credentials. \r
- * Date: 7/19/13\r
- */\r
-public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {\r
- public static final String TABLE = "x509";\r
- public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F\r
- \r
- private HistoryDAO historyDAO;\r
- private CIDAO<AuthzTrans> infoDAO;\r
- private PSInfo psX500,psID;\r
- \r
- public CertDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {\r
- super(trans, CertDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- init(trans);\r
- }\r
-\r
- public CertDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {\r
- super(trans, CertDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO = hDao;\r
- infoDAO = ciDao;\r
- init(trans);\r
- }\r
- \r
- public static final int KEYLIMIT = 2;\r
- public static class Data extends CacheableData implements Bytification {\r
- \r
- public String ca;\r
- public BigInteger serial;\r
- public String id;\r
- public String x500;\r
- public String x509;\r
-\r
- @Override\r
- public int[] invalidate(Cached<?,?> cache) {\r
- return new int[] {\r
- seg(cache,ca,serial)\r
- };\r
- }\r
- \r
- @Override\r
- public ByteBuffer bytify() throws IOException {\r
- ByteArrayOutputStream baos = new ByteArrayOutputStream();\r
- CertLoader.deflt.marshal(this,new DataOutputStream(baos));\r
- return ByteBuffer.wrap(baos.toByteArray());\r
- }\r
- \r
- @Override\r
- public void reconstitute(ByteBuffer bb) throws IOException {\r
- CertLoader.deflt.unmarshal(this, toDIS(bb));\r
- }\r
- }\r
-\r
- private static class CertLoader extends Loader<Data> implements Streamer<Data>{\r
- public static final int MAGIC=85102934;\r
- public static final int VERSION=1;\r
- public static final int BUFF_SIZE=48; // Note: \r
-\r
- public static final CertLoader deflt = new CertLoader(KEYLIMIT);\r
- public CertLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
-\r
- @Override\r
- public Data load(Data data, Row row) {\r
- data.ca = row.getString(0);\r
- ByteBuffer bb = row.getBytesUnsafe(1);\r
- byte[] bytes = new byte[bb.remaining()];\r
- bb.get(bytes);\r
- data.serial = new BigInteger(bytes);\r
- data.id = row.getString(2);\r
- data.x500 = row.getString(3);\r
- data.x509 = row.getString(4);\r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx] = data.ca;\r
- obj[++idx] = ByteBuffer.wrap(data.serial.toByteArray());\r
- }\r
-\r
- @Override\r
- protected void body(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
-\r
- obj[idx] = data.id;\r
- obj[++idx] = data.x500;\r
- obj[++idx] = data.x509;\r
-\r
- \r
- }\r
-\r
- @Override\r
- public void marshal(Data data, DataOutputStream os) throws IOException {\r
- writeHeader(os,MAGIC,VERSION);\r
- writeString(os, data.id);\r
- writeString(os, data.x500);\r
- writeString(os, data.x509);\r
- writeString(os, data.ca);\r
- if(data.serial==null) {\r
- os.writeInt(-1);\r
- } else {\r
- byte[] dsba = data.serial.toByteArray();\r
- int l = dsba.length;\r
- os.writeInt(l);\r
- os.write(dsba,0,l);\r
- }\r
- }\r
-\r
- @Override\r
- public void unmarshal(Data data, DataInputStream is) throws IOException {\r
- /*int version = */readHeader(is,MAGIC,VERSION);\r
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields\r
- byte[] buff = new byte[BUFF_SIZE];\r
- data.id = readString(is,buff);\r
- data.x500 = readString(is,buff);\r
- data.x509 = readString(is,buff);\r
- data.ca = readString(is,buff);\r
- int i = is.readInt();\r
- if(i<0) {\r
- data.serial=null;\r
- } else {\r
- byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads\r
- is.read(bytes);\r
- data.serial = new BigInteger(bytes);\r
- }\r
- }\r
- }\r
- \r
- public Result<List<CertDAO.Data>> read(AuthzTrans trans, Object ... key) {\r
- // Translate BigInteger to Byte array for lookup\r
- return super.read(trans, key[0],ByteBuffer.wrap(((BigInteger)key[1]).toByteArray()));\r
- }\r
-\r
- private void init(AuthzTrans trans) throws APIException, IOException {\r
- // Set up sub-DAOs\r
- if(historyDAO==null) {\r
- historyDAO = new HistoryDAO(trans,this);\r
- }\r
- if(infoDAO==null) {\r
- infoDAO = new CacheInfoDAO(trans,this);\r
- }\r
-\r
- String[] helpers = setCRUD(trans, TABLE, Data.class, CertLoader.deflt);\r
-\r
- psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +\r
- " WHERE id = ?", CertLoader.deflt,readConsistency);\r
-\r
- psX500 = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +\r
- " WHERE x500 = ?", CertLoader.deflt,readConsistency);\r
- \r
- }\r
- \r
- public Result<List<Data>> readX500(AuthzTrans trans, String x500) {\r
- return psX500.read(trans, R_TEXT, new Object[]{x500});\r
- }\r
-\r
- public Result<List<Data>> readID(AuthzTrans trans, String id) {\r
- return psID.read(trans, R_TEXT, new Object[]{id});\r
- }\r
-\r
- /**\r
- * Log Modification statements to History\r
- *\r
- * @param modified which CRUD action was done\r
- * @param data entity data that needs a log entry\r
- * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data\r
- */\r
- @Override\r
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {\r
- boolean memo = override.length>0 && override[0]!=null;\r
- boolean subject = override.length>1 && override[1]!=null;\r
-\r
- HistoryDAO.Data hd = HistoryDAO.newInitedData();\r
- hd.user = trans.user();\r
- hd.action = modified.name();\r
- hd.target = TABLE;\r
- hd.subject = subject?override[1]: data.id;\r
- hd.memo = memo\r
- ? String.format("%s by %s", override[0], hd.user)\r
- : (modified.name() + "d certificate info for " + data.id);\r
- // Detail?\r
- if(modified==CRUD.delete) {\r
- try {\r
- hd.reconstruct = data.bytify();\r
- } catch (IOException e) {\r
- trans.error().log(e,"Could not serialize CertDAO.Data");\r
- }\r
- }\r
-\r
- if(historyDAO.create(trans, hd).status!=Status.OK) {\r
- trans.error().log("Cannot log to History");\r
- }\r
- if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {\r
- trans.error().log("Cannot touch Cert");\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.ByteArrayOutputStream;\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.Date;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.Cached;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-import org.onap.aaf.dao.Streamer;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.util.Chrono;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Row;\r
-\r
-/**\r
- * CredDAO manages credentials. \r
- * Date: 7/19/13\r
- */\r
-public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {\r
- public static final String TABLE = "cred";\r
- public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F\r
- public static final int RAW = -1;\r
- public static final int BASIC_AUTH = 1;\r
- public static final int BASIC_AUTH_SHA256 = 2;\r
- public static final int CERT_SHA256_RSA =200;\r
- \r
- private HistoryDAO historyDAO;\r
- private CIDAO<AuthzTrans> infoDAO;\r
- private PSInfo psNS;\r
- private PSInfo psID;\r
- \r
- public CredDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {\r
- super(trans, CredDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- init(trans);\r
- }\r
-\r
- public CredDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {\r
- super(trans, CredDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO = hDao;\r
- infoDAO = ciDao;\r
- init(trans);\r
- }\r
-\r
- public static final int KEYLIMIT = 3;\r
- public static class Data extends CacheableData implements Bytification {\r
- \r
- public String id;\r
- public Integer type;\r
- public Date expires;\r
- public Integer other;\r
- public String ns;\r
- public String notes;\r
- public ByteBuffer cred; // this is a blob in cassandra\r
-\r
-\r
- @Override\r
- public int[] invalidate(Cached<?,?> cache) {\r
- return new int[] {\r
- seg(cache,id) // cache is for all entities\r
- };\r
- }\r
- \r
- @Override\r
- public ByteBuffer bytify() throws IOException {\r
- ByteArrayOutputStream baos = new ByteArrayOutputStream();\r
- CredLoader.deflt.marshal(this,new DataOutputStream(baos));\r
- return ByteBuffer.wrap(baos.toByteArray());\r
- }\r
- \r
- @Override\r
- public void reconstitute(ByteBuffer bb) throws IOException {\r
- CredLoader.deflt.unmarshal(this, toDIS(bb));\r
- }\r
-\r
- public String toString() {\r
- return id + ' ' + type + ' ' + Chrono.dateTime(expires);\r
- }\r
- }\r
-\r
- private static class CredLoader extends Loader<Data> implements Streamer<Data>{\r
- public static final int MAGIC=153323443;\r
- public static final int VERSION=1;\r
- public static final int BUFF_SIZE=48; // Note: \r
-\r
- public static final CredLoader deflt = new CredLoader(KEYLIMIT);\r
- public CredLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
-\r
- @Override\r
- public Data load(Data data, Row row) {\r
- data.id = row.getString(0);\r
- data.type = row.getInt(1); // NOTE: in datastax driver, If the int value is NULL, 0 is returned!\r
- data.expires = row.getDate(2);\r
- data.other = row.getInt(3);\r
- data.ns = row.getString(4); \r
- data.notes = row.getString(5);\r
- data.cred = row.getBytesUnsafe(6); \r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
-\r
- obj[idx] = data.id;\r
- obj[++idx] = data.type;\r
- obj[++idx] = data.expires;\r
- }\r
-\r
- @Override\r
- protected void body(Data data, int idx, Object[] obj) {\r
- int i;\r
- obj[i=idx] = data.other;\r
- obj[++i] = data.ns;\r
- obj[++i] = data.notes;\r
- obj[++i] = data.cred;\r
- }\r
-\r
- @Override\r
- public void marshal(Data data, DataOutputStream os) throws IOException {\r
- writeHeader(os,MAGIC,VERSION);\r
- writeString(os, data.id);\r
- os.writeInt(data.type); \r
- os.writeLong(data.expires==null?-1:data.expires.getTime());\r
- os.writeInt(data.other==null?0:data.other);\r
- writeString(os, data.ns);\r
- writeString(os, data.notes);\r
- if(data.cred==null) {\r
- os.writeInt(-1);\r
- } else {\r
- int l = data.cred.limit()-data.cred.position();\r
- os.writeInt(l);\r
- os.write(data.cred.array(),data.cred.position(),l);\r
- }\r
- }\r
-\r
- @Override\r
- public void unmarshal(Data data, DataInputStream is) throws IOException {\r
- /*int version = */readHeader(is,MAGIC,VERSION);\r
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields\r
- byte[] buff = new byte[BUFF_SIZE];\r
- data.id = readString(is,buff);\r
- data.type = is.readInt();\r
- \r
- long l = is.readLong();\r
- data.expires = l<0?null:new Date(l);\r
- data.other = is.readInt();\r
- data.ns = readString(is,buff);\r
- data.notes = readString(is,buff);\r
- \r
- int i = is.readInt();\r
- if(i<0) {\r
- data.cred=null;\r
- } else {\r
- byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads\r
- is.read(bytes);\r
- data.cred = ByteBuffer.wrap(bytes);\r
- }\r
- }\r
- }\r
-\r
- private void init(AuthzTrans trans) throws APIException, IOException {\r
- // Set up sub-DAOs\r
- if(historyDAO==null) {\r
- historyDAO = new HistoryDAO(trans,this);\r
- }\r
- if(infoDAO==null) {\r
- infoDAO = new CacheInfoDAO(trans,this);\r
- }\r
- \r
-\r
- String[] helpers = setCRUD(trans, TABLE, Data.class, CredLoader.deflt);\r
- \r
- psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +\r
- " WHERE ns = ?", CredLoader.deflt,readConsistency);\r
- \r
- psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +\r
- " WHERE id = ?", CredLoader.deflt,readConsistency);\r
- }\r
- \r
- public Result<List<Data>> readNS(AuthzTrans trans, String ns) {\r
- return psNS.read(trans, R_TEXT, new Object[]{ns});\r
- }\r
- \r
- public Result<List<Data>> readID(AuthzTrans trans, String id) {\r
- return psID.read(trans, R_TEXT, new Object[]{id});\r
- }\r
- \r
- /**\r
- * Log Modification statements to History\r
- *\r
- * @param modified which CRUD action was done\r
- * @param data entity data that needs a log entry\r
- * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data\r
- */\r
- @Override\r
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {\r
- boolean memo = override.length>0 && override[0]!=null;\r
- boolean subject = override.length>1 && override[1]!=null;\r
-\r
- HistoryDAO.Data hd = HistoryDAO.newInitedData();\r
- hd.user = trans.user();\r
- hd.action = modified.name();\r
- hd.target = TABLE;\r
- hd.subject = subject?override[1]: data.id;\r
- hd.memo = memo\r
- ? String.format("%s by %s", override[0], hd.user)\r
- : (modified.name() + "d credential for " + data.id);\r
- // Detail?\r
- if(modified==CRUD.delete) {\r
- try {\r
- hd.reconstruct = data.bytify();\r
- } catch (IOException e) {\r
- trans.error().log(e,"Could not serialize CredDAO.Data");\r
- }\r
- }\r
-\r
- if(historyDAO.create(trans, hd).status!=Status.OK) {\r
- trans.error().log("Cannot log to History");\r
- }\r
- if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {\r
- trans.error().log("Cannot touch Cred");\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.ByteArrayOutputStream;\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.Date;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.AbsCassDAO;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-import org.onap.aaf.dao.Streamer;\r
-\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Row;\r
-\r
-public class DelegateDAO extends CassDAOImpl<AuthzTrans, DelegateDAO.Data> {\r
-\r
- public static final String TABLE = "delegate";\r
- private PSInfo psByDelegate;\r
- \r
- public DelegateDAO(AuthzTrans trans, Cluster cluster, String keyspace) {\r
- super(trans, DelegateDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- init(trans);\r
- }\r
-\r
- public DelegateDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {\r
- super(trans, DelegateDAO.class.getSimpleName(),aDao,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- init(trans);\r
- }\r
- \r
- private static final int KEYLIMIT = 1;\r
- public static class Data implements Bytification {\r
- public String user;\r
- public String delegate;\r
- public Date expires;\r
-\r
- @Override\r
- public ByteBuffer bytify() throws IOException {\r
- ByteArrayOutputStream baos = new ByteArrayOutputStream();\r
- DelegateLoader.dflt.marshal(this,new DataOutputStream(baos));\r
- return ByteBuffer.wrap(baos.toByteArray());\r
- }\r
- \r
- @Override\r
- public void reconstitute(ByteBuffer bb) throws IOException {\r
- DelegateLoader.dflt.unmarshal(this, toDIS(bb));\r
- }\r
- }\r
- \r
- private static class DelegateLoader extends Loader<Data> implements Streamer<Data>{\r
- public static final int MAGIC=0xD823ACF2;\r
- public static final int VERSION=1;\r
- public static final int BUFF_SIZE=48;\r
-\r
- public static final DelegateLoader dflt = new DelegateLoader(KEYLIMIT);\r
-\r
- public DelegateLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
- \r
- @Override\r
- public Data load(Data data, Row row) {\r
- data.user = row.getString(0);\r
- data.delegate = row.getString(1);\r
- data.expires = row.getDate(2);\r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.user;\r
- }\r
-\r
- @Override\r
- protected void body(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
-\r
- obj[idx]=data.delegate;\r
- obj[++idx]=data.expires;\r
- }\r
-\r
- @Override\r
- public void marshal(Data data, DataOutputStream os) throws IOException {\r
- writeHeader(os,MAGIC,VERSION);\r
- writeString(os, data.user);\r
- writeString(os, data.delegate);\r
- os.writeLong(data.expires.getTime());\r
- }\r
-\r
- @Override\r
- public void unmarshal(Data data, DataInputStream is) throws IOException {\r
- /*int version = */readHeader(is,MAGIC,VERSION);\r
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields\r
- byte[] buff = new byte[BUFF_SIZE];\r
- data.user = readString(is, buff);\r
- data.delegate = readString(is,buff);\r
- data.expires = new Date(is.readLong());\r
- }\r
- } \r
- \r
- private void init(AuthzTrans trans) {\r
- String[] helpers = setCRUD(trans, TABLE, Data.class, DelegateLoader.dflt);\r
- psByDelegate = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +\r
- " WHERE delegate = ?", new DelegateLoader(1),readConsistency);\r
-\r
- }\r
-\r
- public Result<List<DelegateDAO.Data>> readByDelegate(AuthzTrans trans, String delegate) {\r
- return psByDelegate.read(trans, R_TEXT, new Object[]{delegate});\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.nio.ByteBuffer;\r
-import java.util.Date;\r
-import java.util.List;\r
-import java.util.UUID;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.onap.aaf.dao.Loader;\r
-\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.ResultSet;\r
-import com.datastax.driver.core.Row;\r
-\r
-/**\r
- * FutureDAO stores Construction information to create \r
- * elements at another time.\r
- * \r
- * 8/20/2013\r
- */\r
-public class FutureDAO extends CassDAOImpl<AuthzTrans,FutureDAO.Data> {\r
- private static final String TABLE = "future";\r
- private final HistoryDAO historyDAO;\r
-// private static String createString;\r
- private PSInfo psByStartAndTarget;\r
- \r
- public FutureDAO(AuthzTrans trans, Cluster cluster, String keyspace) {\r
- super(trans, FutureDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO = new HistoryDAO(trans, this);\r
- init(trans);\r
- }\r
-\r
- public FutureDAO(AuthzTrans trans, HistoryDAO hDAO) {\r
- super(trans, FutureDAO.class.getSimpleName(),hDAO, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO=hDAO;\r
- init(trans);\r
- }\r
-\r
- public static final int KEYLIMIT = 1;\r
- public static class Data {\r
- public UUID id;\r
- public String target;\r
- public String memo;\r
- public Date start;\r
- public Date expires;\r
- public ByteBuffer construct; // this is a blob in cassandra\r
- }\r
-\r
- private static class FLoader extends Loader<Data> {\r
- public FLoader() {\r
- super(KEYLIMIT);\r
- }\r
-\r
- public FLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
-\r
- @Override\r
- public Data load(Data data, Row row) {\r
- data.id = row.getUUID(0);\r
- data.target = row.getString(1);\r
- data.memo = row.getString(2);\r
- data.start = row.getDate(3);\r
- data.expires = row.getDate(4);\r
- data.construct = row.getBytes(5);\r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx] = data.id;\r
- }\r
-\r
- @Override\r
- protected void body(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
-\r
- obj[idx] = data.target;\r
- obj[++idx] = data.memo;\r
- obj[++idx] = data.start;\r
- obj[++idx] = data.expires;\r
- obj[++idx] = data.construct;\r
- }\r
- }\r
-\r
- private void init(AuthzTrans trans) {\r
- // Set up sub-DAOs\r
- String[] helpers = setCRUD(trans, TABLE, Data.class, new FLoader(KEYLIMIT));\r
-\r
- // Uh, oh. Can't use "now()" in Prepared Statements (at least at this level)\r
-// createString = "INSERT INTO " + TABLE + " ("+helpers[FIELD_COMMAS] +") VALUES (now(),";\r
-//\r
-// // Need a specialty Creator to handle the "now()"\r
-// replace(CRUD.Create, new PSInfo(trans, "INSERT INTO future (" + helpers[FIELD_COMMAS] +\r
-// ") VALUES(now(),?,?,?,?,?)",new FLoader(0)));\r
- \r
- // Other SELECT style statements... match with a local Method\r
- psByStartAndTarget = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +\r
- " FROM future WHERE start <= ? and target = ? ALLOW FILTERING", new FLoader(2) {\r
- @Override\r
- protected void key(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
-\r
- obj[idx]=data.start;\r
- obj[++idx]=data.target;\r
- }\r
- },readConsistency);\r
- \r
-\r
- }\r
-\r
- public Result<List<Data>> readByStartAndTarget(AuthzTrans trans, Date start, String target) throws DAOException {\r
- return psByStartAndTarget.read(trans, R_TEXT, new Object[]{start, target});\r
- }\r
-\r
- /**\r
- * Override create to add secondary ID to Subject in History, and create Data.ID, if it is null\r
- */\r
- public Result<FutureDAO.Data> create(AuthzTrans trans, FutureDAO.Data data, String id) {\r
- // If ID is not set (typical), create one.\r
- if(data.id==null) {\r
- StringBuilder sb = new StringBuilder(trans.user());\r
- sb.append(data.target);\r
- sb.append(System.currentTimeMillis());\r
- data.id = UUID.nameUUIDFromBytes(sb.toString().getBytes());\r
- }\r
- Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- wasModified(trans, CRUD.create, data, null, id);\r
- return Result.ok(data); \r
- }\r
-\r
- /**\r
- * Log Modification statements to History\r
- *\r
- * @param modified which CRUD action was done\r
- * @param data entity data that needs a log entry\r
- * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data\r
- */\r
- @Override\r
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {\r
- boolean memo = override.length>0 && override[0]!=null;\r
- boolean subject = override.length>1 && override[1]!=null;\r
- HistoryDAO.Data hd = HistoryDAO.newInitedData();\r
- hd.user = trans.user();\r
- hd.action = modified.name();\r
- hd.target = TABLE;\r
- hd.subject = subject?override[1]:"";\r
- hd.memo = memo?String.format("%s by %s", override[0], hd.user):data.memo;\r
- \r
- if(historyDAO.create(trans, hd).status!=Status.OK) {\r
- trans.error().log("Cannot log to History");\r
- }\r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.nio.ByteBuffer;\r
-import java.text.SimpleDateFormat;\r
-import java.util.Date;\r
-import java.util.List;\r
-import java.util.UUID;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.AbsCassDAO;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.ConsistencyLevel;\r
-import com.datastax.driver.core.ResultSet;\r
-import com.datastax.driver.core.Row;\r
-\r
-/**\r
- * History\r
- * \r
- * \r
- * History is a special case, because we don't want Updates or Deletes... Too likely to mess up history.\r
- * \r
- * 9-9-2013 - Found a problem with using "Prepare". You cannot prepare anything with a "now()" in it, as\r
- * it is evaluated once during the prepare, and kept. That renders any use of "now()" pointless. Therefore\r
- * the Create function needs to be run fresh everytime.\r
- * \r
- * Fixed in Cassandra 1.2.6 https://issues.apache.org/jira/browse/CASSANDRA-5616\r
- *\r
- */\r
-public class HistoryDAO extends CassDAOImpl<AuthzTrans, HistoryDAO.Data> {\r
- private static final String TABLE = "history";\r
-\r
- public static final SimpleDateFormat monthFormat = new SimpleDateFormat("yyyyMM");\r
-// private static final SimpleDateFormat dayTimeFormat = new SimpleDateFormat("ddHHmmss");\r
-\r
- private String[] helpers;\r
-\r
- private HistLoader defLoader;\r
-\r
- private AbsCassDAO<AuthzTrans, Data>.PSInfo readByUser, readBySubject, readByYRMN;\r
-\r
- public HistoryDAO(AuthzTrans trans, Cluster cluster, String keyspace) {\r
- super(trans, HistoryDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);\r
- init(trans);\r
- }\r
-\r
- public HistoryDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {\r
- super(trans, HistoryDAO.class.getSimpleName(),aDao,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);\r
- init(trans);\r
- }\r
-\r
-\r
- private static final int KEYLIMIT = 1;\r
- public static class Data {\r
- public UUID id;\r
- public int yr_mon;\r
- public String user;\r
- public String action;\r
- public String target;\r
- public String subject;\r
- public String memo;\r
-// Map<String, String> detail = null;\r
-// public Map<String, String> detail() {\r
-// if(detail == null) {\r
-// detail = new HashMap<String, String>();\r
-// }\r
-// return detail;\r
-// }\r
- public ByteBuffer reconstruct;\r
- }\r
- \r
- private static class HistLoader extends Loader<Data> {\r
- public HistLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
-\r
- @Override\r
- public Data load(Data data, Row row) {\r
- data.id = row.getUUID(0);\r
- data.yr_mon = row.getInt(1);\r
- data.user = row.getString(2);\r
- data.action = row.getString(3);\r
- data.target = row.getString(4);\r
- data.subject = row.getString(5);\r
- data.memo = row.getString(6);\r
-// data.detail = row.getMap(6, String.class, String.class);\r
- data.reconstruct = row.getBytes(7);\r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.id;\r
- }\r
-\r
- @Override\r
- protected void body(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
- obj[idx]=data.yr_mon;\r
- obj[++idx]=data.user;\r
- obj[++idx]=data.action;\r
- obj[++idx]=data.target;\r
- obj[++idx]=data.subject;\r
- obj[++idx]=data.memo;\r
-// obj[++idx]=data.detail;\r
- obj[++idx]=data.reconstruct; \r
- }\r
- };\r
- \r
- private void init(AuthzTrans trans) {\r
- // Loader must match fields order\r
- defLoader = new HistLoader(KEYLIMIT);\r
- helpers = setCRUD(trans, TABLE, Data.class, defLoader);\r
-\r
- // Need a specialty Creator to handle the "now()"\r
- // 9/9/2013 - jg - Just great... now() is evaluated once on Client side, invalidating usage (what point is a now() from a long time in the past?\r
- // Unless this is fixed, we're putting in non-prepared statement\r
- // Solved in Cassandra. Make sure you are running 1.2.6 Cassandra or later. https://issues.apache.org/jira/browse/CASSANDRA-5616 \r
- replace(CRUD.create, new PSInfo(trans, "INSERT INTO history (" + helpers[FIELD_COMMAS] +\r
- ") VALUES(now(),?,?,?,?,?,?,?)", \r
- new HistLoader(0) {\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- }\r
- },writeConsistency)\r
- );\r
-// disable(CRUD.Create);\r
- \r
- replace(CRUD.read, new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +\r
- " FROM history WHERE id = ?", defLoader,readConsistency) \r
-// new HistLoader(2) {\r
-// @Override\r
-// protected void key(Data data, int idx, Object[] obj) {\r
-// obj[idx]=data.yr_mon;\r
-// obj[++idx]=data.id;\r
-// }\r
-// })\r
- );\r
- disable(CRUD.update);\r
- disable(CRUD.delete);\r
- \r
- readByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + \r
- " FROM history WHERE user = ?", defLoader,readConsistency);\r
- readBySubject = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + \r
- " FROM history WHERE subject = ? and target = ? ALLOW FILTERING", defLoader,readConsistency);\r
- readByYRMN = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + \r
- " FROM history WHERE yr_mon = ?", defLoader,readConsistency);\r
- async(true); //TODO dropping messages with Async\r
- }\r
-\r
- public static Data newInitedData() {\r
- Data data = new Data();\r
- Date now = new Date();\r
- data.yr_mon = Integer.parseInt(monthFormat.format(now));\r
- // data.day_time = Integer.parseInt(dayTimeFormat.format(now));\r
- return data; \r
- }\r
-\r
- public Result<List<Data>> readByYYYYMM(AuthzTrans trans, int yyyymm) {\r
- Result<ResultSet> rs = readByYRMN.exec(trans, "yr_mon", yyyymm);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- return extract(defLoader,rs.value,null,dflt);\r
- }\r
-\r
- /**\r
- * Gets the history for a user in the specified year and month\r
- * year - the year in yyyy format\r
- * month - the month in a year ...values 1 - 12\r
- **/\r
- public Result<List<Data>> readByUser(AuthzTrans trans, String user, int ... yyyymm) {\r
- if(yyyymm.length==0) {\r
- return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");\r
- }\r
- Result<ResultSet> rs = readByUser.exec(trans, "user", user);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);\r
- }\r
- \r
- public Result<List<Data>> readBySubject(AuthzTrans trans, String subject, String target, int ... yyyymm) {\r
- if(yyyymm.length==0) {\r
- return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");\r
- }\r
- Result<ResultSet> rs = readBySubject.exec(trans, "subject", subject, target);\r
- if(rs.notOK()) {\r
- return Result.err(rs);\r
- }\r
- return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);\r
- }\r
- \r
- private class YYYYMM implements Accept<Data> {\r
- private int[] yyyymm;\r
- public YYYYMM(int yyyymm[]) {\r
- this.yyyymm = yyyymm;\r
- }\r
- @Override\r
- public boolean ok(Data data) {\r
- int dym = data.yr_mon;\r
- for(int ym:yyyymm) {\r
- if(dym==ym) {\r
- return true;\r
- }\r
- }\r
- return false;\r
- }\r
- \r
- };\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.ByteArrayOutputStream;\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.ArrayList;\r
-import java.util.List;\r
-import java.util.Map.Entry;\r
-\r
-import org.onap.aaf.cssa.rserv.Pair;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-\r
-\r
-public class Namespace implements Bytification {\r
- public static final int MAGIC=250935515;\r
- public static final int VERSION=1;\r
- public static final int BUFF_SIZE=48;\r
-\r
- public String name;\r
- public List<String> owner;\r
- public List<String> admin;\r
- public List<Pair<String,String>> attrib;\r
- public String description;\r
- public Integer type;\r
- public String parent;\r
- public Namespace() {}\r
- \r
- public Namespace(NsDAO.Data ndd) {\r
- name = ndd.name;\r
- description = ndd.description;\r
- type = ndd.type;\r
- parent = ndd.parent;\r
- if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {\r
- attrib = new ArrayList<Pair<String,String>>();\r
- for( Entry<String, String> entry : ndd.attrib.entrySet()) {\r
- attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));\r
- }\r
- }\r
- }\r
- \r
- public Namespace(NsDAO.Data ndd,List<String> owner, List<String> admin) {\r
- name = ndd.name;\r
- this.owner = owner;\r
- this.admin = admin;\r
- description = ndd.description;\r
- type = ndd.type;\r
- parent = ndd.parent;\r
- if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {\r
- attrib = new ArrayList<Pair<String,String>>();\r
- for( Entry<String, String> entry : ndd.attrib.entrySet()) {\r
- attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));\r
- }\r
- }\r
- }\r
-\r
- public NsDAO.Data data() {\r
- NsDAO.Data ndd = new NsDAO.Data();\r
- ndd.name = name;\r
- ndd.description = description;\r
- ndd.parent = parent;\r
- ndd.type = type;\r
- return ndd;\r
- }\r
-\r
- @Override\r
- public ByteBuffer bytify() throws IOException {\r
- ByteArrayOutputStream baos = new ByteArrayOutputStream();\r
- DataOutputStream os = new DataOutputStream(baos);\r
-\r
- Loader.writeHeader(os,MAGIC,VERSION);\r
- Loader.writeString(os, name);\r
- os.writeInt(type);\r
- Loader.writeStringSet(os,admin);\r
- Loader.writeStringSet(os,owner);\r
- Loader.writeString(os,description);\r
- Loader.writeString(os,parent);\r
-\r
- return ByteBuffer.wrap(baos.toByteArray());\r
- }\r
-\r
- @Override\r
- public void reconstitute(ByteBuffer bb) throws IOException {\r
- DataInputStream is = CassDAOImpl.toDIS(bb);\r
- /*int version = */Loader.readHeader(is,MAGIC,VERSION);\r
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields\r
- \r
- byte[] buff = new byte[BUFF_SIZE];\r
- name = Loader.readString(is, buff);\r
- type = is.readInt();\r
- admin = Loader.readStringList(is,buff);\r
- owner = Loader.readStringList(is,buff);\r
- description = Loader.readString(is,buff);\r
- parent = Loader.readString(is,buff);\r
- \r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see java.lang.Object#hashCode()\r
- */\r
- @Override\r
- public int hashCode() {\r
- return name.hashCode();\r
- }\r
- \r
-\r
- /* (non-Javadoc)\r
- * @see java.lang.Object#toString()\r
- */\r
- @Override\r
- public String toString() {\r
- return name.toString();\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see java.lang.Object#equals(java.lang.Object)\r
- */\r
- @Override\r
- public boolean equals(Object arg0) {\r
- if(arg0==null || !(arg0 instanceof Namespace)) {\r
- return false;\r
- }\r
- return name.equals(((Namespace)arg0).name);\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.ByteArrayOutputStream;\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.HashMap;\r
-import java.util.HashSet;\r
-import java.util.Iterator;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Map.Entry;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.Cached;\r
-import org.onap.aaf.dao.CassAccess;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-import org.onap.aaf.dao.Streamer;\r
-\r
-import java.util.Set;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.ResultSet;\r
-import com.datastax.driver.core.Row;\r
-import com.datastax.driver.core.exceptions.DriverException;\r
-\r
-/**\r
- * NsDAO\r
- * \r
- * Data Access Object for Namespace Data\r
- *\r
- */\r
-public class NsDAO extends CassDAOImpl<AuthzTrans,NsDAO.Data> {\r
- public static final String TABLE = "ns";\r
- public static final String TABLE_ATTRIB = "ns_attrib";\r
- public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F\r
- public static final int ROOT = 1;\r
- public static final int COMPANY=2;\r
- public static final int APP = 3;\r
-\r
- private static final String BEGIN_BATCH = "BEGIN BATCH\n";\r
- private static final String APPLY_BATCH = "APPLY BATCH;\n";\r
- private static final String SQSCCR = "';\n";\r
- private static final String SQCSQ = "','";\r
- \r
- private HistoryDAO historyDAO;\r
- private CacheInfoDAO infoDAO;\r
- private PSInfo psNS;\r
-\r
- public NsDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {\r
- super(trans, NsDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- init(trans);\r
- }\r
-\r
- public NsDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO iDAO) throws APIException, IOException {\r
- super(trans, NsDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO=hDAO;\r
- infoDAO = iDAO;\r
- init(trans);\r
- }\r
-\r
-\r
- //////////////////////////////////////////\r
- // Data Definition, matches Cassandra DM\r
- //////////////////////////////////////////\r
- private static final int KEYLIMIT = 1;\r
- /**\r
- * Data class that matches the Cassandra Table "role"\r
- * \r
- */\r
- public static class Data extends CacheableData implements Bytification {\r
- public String name;\r
- public int type;\r
- public String description;\r
- public String parent;\r
- public Map<String,String> attrib;\r
-\r
-// ////////////////////////////////////////\r
-// // Getters\r
- public Map<String,String> attrib(boolean mutable) {\r
- if (attrib == null) {\r
- attrib = new HashMap<String,String>();\r
- } else if (mutable && !(attrib instanceof HashMap)) {\r
- attrib = new HashMap<String,String>(attrib);\r
- }\r
- return attrib;\r
- }\r
-\r
- @Override\r
- public int[] invalidate(Cached<?,?> cache) {\r
- return new int[] {\r
- seg(cache,name)\r
- };\r
- }\r
-\r
- public NsSplit split(String name) {\r
- return new NsSplit(this,name);\r
- }\r
-\r
- @Override\r
- public ByteBuffer bytify() throws IOException {\r
- ByteArrayOutputStream baos = new ByteArrayOutputStream();\r
- NSLoader.deflt.marshal(this,new DataOutputStream(baos));\r
- return ByteBuffer.wrap(baos.toByteArray());\r
- }\r
- \r
- @Override\r
- public void reconstitute(ByteBuffer bb) throws IOException {\r
- NSLoader.deflt.unmarshal(this,toDIS(bb));\r
- }\r
- \r
- @Override\r
- public String toString() {\r
- return name;\r
- }\r
- \r
- }\r
- \r
- private void init(AuthzTrans trans) throws APIException, IOException {\r
- // Set up sub-DAOs\r
- if(historyDAO==null) {\r
- historyDAO = new HistoryDAO(trans, this);\r
- }\r
- if(infoDAO==null) {\r
- infoDAO = new CacheInfoDAO(trans,this);\r
- }\r
-\r
- String[] helpers = setCRUD(trans, TABLE, Data.class, NSLoader.deflt,4/*need to skip attrib */);\r
- \r
- psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +\r
- " WHERE parent = ?", new NSLoader(1),readConsistency);\r
-\r
- }\r
- \r
- private static final class NSLoader extends Loader<Data> implements Streamer<Data> {\r
- public static final int MAGIC=250935515;\r
- public static final int VERSION=1;\r
- public static final int BUFF_SIZE=48;\r
-\r
- public static final NSLoader deflt = new NSLoader(KEYLIMIT);\r
- \r
- public NSLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
-\r
- @Override\r
- public Data load(Data data, Row row) {\r
- // Int more efficient\r
- data.name = row.getString(0);\r
- data.type = row.getInt(1);\r
- data.description = row.getString(2);\r
- data.parent = row.getString(3);\r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.name;\r
- }\r
-\r
- @Override\r
- protected void body(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
-\r
- obj[idx]=data.type;\r
- obj[++idx]=data.description;\r
- obj[++idx]=data.parent;\r
- }\r
- \r
- @Override\r
- public void marshal(Data data, DataOutputStream os) throws IOException {\r
- writeHeader(os,MAGIC,VERSION);\r
- writeString(os, data.name);\r
- os.writeInt(data.type);\r
- writeString(os,data.description);\r
- writeString(os,data.parent);\r
- if(data.attrib==null) {\r
- os.writeInt(-1);\r
- } else {\r
- os.writeInt(data.attrib.size());\r
- for(Entry<String, String> es : data.attrib(false).entrySet()) {\r
- writeString(os,es.getKey());\r
- writeString(os,es.getValue());\r
- }\r
- }\r
- }\r
-\r
- @Override\r
- public void unmarshal(Data data, DataInputStream is) throws IOException {\r
- /*int version = */readHeader(is,MAGIC,VERSION);\r
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields\r
- \r
- byte[] buff = new byte[BUFF_SIZE];\r
- data.name = readString(is, buff);\r
- data.type = is.readInt();\r
- data.description = readString(is,buff);\r
- data.parent = readString(is,buff);\r
- int count = is.readInt();\r
- if(count>0) {\r
- Map<String, String> da = data.attrib(true);\r
- for(int i=0;i<count;++i) {\r
- da.put(readString(is,buff), readString(is,buff));\r
- }\r
- }\r
- }\r
-\r
- }\r
- \r
- @Override\r
- public Result<Data> create(AuthzTrans trans, Data data) {\r
- String ns = data.name;\r
- // Ensure Parent is set\r
- int ldot = ns.lastIndexOf('.');\r
- data.parent=ldot<0?".":ns.substring(0,ldot);\r
-\r
- // insert Attributes\r
- StringBuilder stmt = new StringBuilder();\r
- stmt.append(BEGIN_BATCH);\r
- attribInsertStmts(stmt, data);\r
- stmt.append(APPLY_BATCH);\r
- try {\r
- getSession(trans).execute(stmt.toString());\r
-//// TEST CODE for Exception \r
-// boolean force = true; \r
-// if(force) {\r
-// throw new com.datastax.driver.core.exceptions.NoHostAvailableException(new HashMap<InetSocketAddress,Throwable>());\r
-//// throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"Sample Message");\r
-// }\r
-////END TEST CODE\r
-\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- trans.info().log(stmt);\r
- return Result.err(Result.ERR_Backend, "Backend Access");\r
- }\r
- return super.create(trans, data);\r
- }\r
-\r
- @Override\r
- public Result<Void> update(AuthzTrans trans, Data data) {\r
- String ns = data.name;\r
- // Ensure Parent is set\r
- int ldot = ns.lastIndexOf('.');\r
- data.parent=ldot<0?".":ns.substring(0,ldot);\r
-\r
- StringBuilder stmt = new StringBuilder();\r
- stmt.append(BEGIN_BATCH);\r
- try {\r
- Map<String, String> localAttr = data.attrib;\r
- Result<Map<String, String>> rremoteAttr = readAttribByNS(trans,ns);\r
- if(rremoteAttr.notOK()) {\r
- return Result.err(rremoteAttr);\r
- }\r
- // update Attributes\r
- String str;\r
- for(Entry<String, String> es : localAttr.entrySet()) {\r
- str = rremoteAttr.value.get(es.getKey());\r
- if(str==null || !str.equals(es.getValue())) {\r
- attribInsertStmt(stmt, ns, es.getKey(),es.getValue());\r
- }\r
- }\r
- \r
- // No point in deleting... insert overwrites...\r
-// for(Entry<String, String> es : remoteAttr.entrySet()) {\r
-// str = localAttr.get(es.getKey());\r
-// if(str==null || !str.equals(es.getValue())) {\r
-// attribDeleteStmt(stmt, ns, es.getKey());\r
-// }\r
-// }\r
- if(stmt.length()>BEGIN_BATCH.length()) {\r
- stmt.append(APPLY_BATCH);\r
- getSession(trans).execute(stmt.toString());\r
- }\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- trans.info().log(stmt);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
-\r
- return super.update(trans,data);\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.dao.CassDAOImpl#read(org.onap.aaf.inno.env.TransStore, java.lang.Object)\r
- */\r
- @Override\r
- public Result<List<Data>> read(AuthzTrans trans, Data data) {\r
- Result<List<Data>> rld = super.read(trans, data);\r
- \r
- if(rld.isOKhasData()) {\r
- for(Data d : rld.value) {\r
- // Note: Map is null at this point, save time/mem by assignment\r
- Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);\r
- if(rabn.isOK()) {\r
- d.attrib = rabn.value;\r
- } else {\r
- return Result.err(rabn);\r
- }\r
- }\r
- }\r
- return rld;\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.dao.CassDAOImpl#read(org.onap.aaf.inno.env.TransStore, java.lang.Object[])\r
- */\r
- @Override\r
- public Result<List<Data>> read(AuthzTrans trans, Object... key) {\r
- Result<List<Data>> rld = super.read(trans, key);\r
-\r
- if(rld.isOKhasData()) {\r
- for(Data d : rld.value) {\r
- // Note: Map is null at this point, save time/mem by assignment\r
- Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);\r
- if(rabn.isOK()) {\r
- d.attrib = rabn.value;\r
- } else {\r
- return Result.err(rabn);\r
- }\r
- }\r
- }\r
- return rld;\r
- }\r
-\r
- @Override\r
- public Result<Void> delete(AuthzTrans trans, Data data, boolean reread) {\r
- TimeTaken tt = trans.start("Delete NS Attributes " + data.name, Env.REMOTE);\r
- try {\r
- StringBuilder stmt = new StringBuilder();\r
- attribDeleteAllStmt(stmt, data);\r
- try {\r
- getSession(trans).execute(stmt.toString());\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- trans.info().log(stmt);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
- return super.delete(trans, data, reread);\r
-\r
- }\r
- \r
- public Result<Map<String,String>> readAttribByNS(AuthzTrans trans, String ns) {\r
- Map<String,String> map = new HashMap<String,String>();\r
- TimeTaken tt = trans.start("readAttribByNS " + ns, Env.REMOTE);\r
- try {\r
- ResultSet rs = getSession(trans).execute("SELECT key,value FROM " \r
- + TABLE_ATTRIB \r
- + " WHERE ns='"\r
- + ns\r
- + "';");\r
- \r
- for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {\r
- Row r = iter.next();\r
- map.put(r.getString(0), r.getString(1));\r
- }\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- } finally {\r
- tt.done();\r
- }\r
- return Result.ok(map);\r
- }\r
-\r
- public Result<Set<String>> readNsByAttrib(AuthzTrans trans, String key) {\r
- Set<String> set = new HashSet<String>();\r
- TimeTaken tt = trans.start("readNsBykey " + key, Env.REMOTE);\r
- try {\r
- ResultSet rs = getSession(trans).execute("SELECT ns FROM " \r
- + TABLE_ATTRIB \r
- + " WHERE key='"\r
- + key\r
- + "';");\r
- \r
- for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {\r
- Row r = iter.next();\r
- set.add(r.getString(0));\r
- }\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- } finally {\r
- tt.done();\r
- }\r
- return Result.ok(set);\r
- }\r
-\r
- public Result<Void> attribAdd(AuthzTrans trans, String ns, String key, String value) {\r
- try {\r
- getSession(trans).execute(attribInsertStmt(new StringBuilder(),ns,key,value).toString());\r
- return Result.ok();\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
- }\r
- \r
- private StringBuilder attribInsertStmt(StringBuilder sb, String ns, String key, String value) {\r
- sb.append("INSERT INTO ");\r
- sb.append(TABLE_ATTRIB);\r
- sb.append(" (ns,key,value) VALUES ('");\r
- sb.append(ns);\r
- sb.append(SQCSQ);\r
- sb.append(key);\r
- sb.append(SQCSQ);\r
- sb.append(value);\r
- sb.append("');");\r
- return sb;\r
- }\r
- \r
- public Result<Void> attribRemove(AuthzTrans trans, String ns, String key) {\r
- try {\r
- getSession(trans).execute(attribDeleteStmt(new StringBuilder(),ns,key).toString());\r
- return Result.ok();\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
- }\r
- \r
- private StringBuilder attribDeleteStmt(StringBuilder stmt, String ns, String key) {\r
- stmt.append("DELETE FROM ");\r
- stmt.append(TABLE_ATTRIB);\r
- stmt.append(" WHERE ns='");\r
- stmt.append(ns);\r
- stmt.append("' AND key='");\r
- stmt.append(key);\r
- stmt.append("';");\r
- return stmt;\r
- }\r
- \r
- private void attribDeleteAllStmt(StringBuilder stmt, Data data) {\r
- stmt.append(" DELETE FROM ");\r
- stmt.append(TABLE_ATTRIB);\r
- stmt.append(" WHERE ns='");\r
- stmt.append(data.name);\r
- stmt.append(SQSCCR);\r
- }\r
-\r
- private void attribInsertStmts(StringBuilder stmt, Data data) {\r
- // INSERT new Attrib\r
- for(Entry<String,String> es : data.attrib(false).entrySet() ) {\r
- stmt.append(" ");\r
- attribInsertStmt(stmt,data.name,es.getKey(),es.getValue());\r
- }\r
- }\r
-\r
- /**\r
- * Add description to Namespace\r
- * @param trans\r
- * @param ns\r
- * @param description\r
- * @return\r
- */\r
- public Result<Void> addDescription(AuthzTrans trans, String ns, String description) {\r
- try {\r
- getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" \r
- + description + "' WHERE name = '" + ns + "';");\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
-\r
- Data data = new Data();\r
- data.name=ns;\r
- wasModified(trans, CRUD.update, data, "Added description " + description + " to namespace " + ns, null );\r
- return Result.ok();\r
- }\r
-\r
- public Result<List<Data>> getChildren(AuthzTrans trans, String parent) {\r
- return psNS.read(trans, R_TEXT, new Object[]{parent});\r
- }\r
- \r
-\r
- /**\r
- * Log Modification statements to History\r
- * \r
- * @param modified which CRUD action was done\r
- * @param data entity data that needs a log entry\r
- * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data\r
- */\r
- @Override\r
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {\r
- boolean memo = override.length>0 && override[0]!=null;\r
- boolean subject = override.length>1 && override[1]!=null;\r
-\r
- //TODO Must log history\r
- HistoryDAO.Data hd = HistoryDAO.newInitedData();\r
- hd.user = trans.user();\r
- hd.action = modified.name();\r
- hd.target = TABLE;\r
- hd.subject = subject ? override[1] : data.name;\r
- hd.memo = memo ? override[0] : (data.name + " was " + modified.name() + 'd' );\r
- if(modified==CRUD.delete) {\r
- try {\r
- hd.reconstruct = data.bytify();\r
- } catch (IOException e) {\r
- trans.error().log(e,"Could not serialize NsDAO.Data");\r
- }\r
- }\r
-\r
- if(historyDAO.create(trans, hd).status!=Status.OK) {\r
- trans.error().log("Cannot log to History");\r
- }\r
- if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {\r
- trans.error().log("Cannot touch CacheInfo");\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-public class NsSplit {\r
- public final String ns;\r
- public final String name;\r
- public final NsDAO.Data nsd;\r
- \r
- public NsSplit(NsDAO.Data nsd, String child) {\r
- this.nsd = nsd;\r
- if(child.startsWith(nsd.name)) {\r
- ns = nsd.name;\r
- int dot = ns.length();\r
- if(dot<child.length() && child.charAt(dot)=='.') {\r
- name = child.substring(dot+1);\r
- } else {\r
- name="";\r
- }\r
- } else {\r
- name=null;\r
- ns = null;\r
- }\r
- }\r
- \r
- public NsSplit(String ns, String name) {\r
- this.ns = ns;\r
- this.name = name;\r
- this.nsd = new NsDAO.Data();\r
- nsd.name = ns;\r
- int dot = ns.lastIndexOf('.');\r
- if(dot>=0) {\r
- nsd.parent = ns.substring(0, dot);\r
- } else {\r
- nsd.parent = ".";\r
- }\r
- }\r
-\r
- public boolean isOK() {\r
- return ns!=null && name !=null;\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-/**\r
- * Defines the Type Codes in the NS Table.\r
- *\r
- */\r
-public enum NsType {\r
- UNKNOWN (-1),\r
- DOT (0),\r
- ROOT (1), \r
- COMPANY (2), \r
- APP (3), \r
- STACKED_APP (10), \r
- STACK (11);\r
- \r
- public final int type;\r
- private NsType(int t) {\r
- type = t;\r
- }\r
- /**\r
- * This is not the Ordinal, but the Type that is stored in NS Tables\r
- * \r
- * @param t\r
- * @return\r
- */\r
- public static NsType fromType(int t) {\r
- for(NsType nst : values()) {\r
- if(t==nst.type) {\r
- return nst;\r
- }\r
- }\r
- return UNKNOWN;\r
- }\r
- \r
- /**\r
- * Use this one rather than "valueOf" to avoid Exception\r
- * @param s\r
- * @return\r
- */\r
- public static NsType fromString(String s) {\r
- if(s!=null) {\r
- for(NsType nst : values()) {\r
- if(nst.name().equals(s)) {\r
- return nst;\r
- }\r
- }\r
- }\r
- return UNKNOWN;\r
- }\r
-\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.ByteArrayOutputStream;\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Set;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.Cached;\r
-import org.onap.aaf.dao.CassAccess;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.onap.aaf.dao.Loader;\r
-import org.onap.aaf.dao.Streamer;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.util.Split;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Row;\r
-import com.datastax.driver.core.exceptions.DriverException;\r
-\r
-public class PermDAO extends CassDAOImpl<AuthzTrans,PermDAO.Data> {\r
-\r
- public static final String TABLE = "perm";\r
-\r
- public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F\r
- private static final String STAR = "*";\r
- \r
- private final HistoryDAO historyDAO;\r
- private final CacheInfoDAO infoDAO;\r
- \r
- private PSInfo psNS, psChildren, psByType;\r
-\r
- public PermDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {\r
- super(trans, PermDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- init(trans);\r
- historyDAO = new HistoryDAO(trans, this);\r
- infoDAO = new CacheInfoDAO(trans,this);\r
- }\r
-\r
- public PermDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {\r
- super(trans, PermDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO = hDAO;\r
- infoDAO=ciDAO;\r
- init(trans);\r
- }\r
-\r
-\r
- private static final int KEYLIMIT = 4;\r
- public static class Data extends CacheableData implements Bytification {\r
- public String ns;\r
- public String type;\r
- public String instance;\r
- public String action;\r
- public Set<String> roles; \r
- public String description;\r
-\r
- public Data() {}\r
- \r
- public Data(NsSplit nss, String instance, String action) {\r
- ns = nss.ns;\r
- type = nss.name;\r
- this.instance = instance;\r
- this.action = action;\r
- }\r
-\r
- public String fullType() {\r
- return ns + '.' + type;\r
- }\r
- \r
- public String fullPerm() {\r
- return ns + '.' + type + '|' + instance + '|' + action;\r
- }\r
-\r
- public String encode() {\r
- return ns + '|' + type + '|' + instance + '|' + action;\r
- }\r
- \r
- /**\r
- * Decode Perm String, including breaking into appropriate Namespace\r
- * \r
- * @param trans\r
- * @param q\r
- * @param p\r
- * @return\r
- */\r
- public static Result<Data> decode(AuthzTrans trans, Question q, String p) {\r
- String[] ss = Split.splitTrim('|', p,4);\r
- if(ss[2]==null) {\r
- return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");\r
- }\r
- Data data = new Data();\r
- if(ss[3]==null) { // older 3 part encoding must be evaluated for NS\r
- Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);\r
- if(nss.notOK()) {\r
- return Result.err(nss);\r
- }\r
- data.ns=nss.value.ns;\r
- data.type=nss.value.name;\r
- data.instance=ss[1];\r
- data.action=ss[2];\r
- } else { // new 4 part encoding\r
- data.ns=ss[0];\r
- data.type=ss[1];\r
- data.instance=ss[2];\r
- data.action=ss[3];\r
- }\r
- return Result.ok(data);\r
- }\r
-\r
- /**\r
- * Decode Perm String, including breaking into appropriate Namespace\r
- * \r
- * @param trans\r
- * @param q\r
- * @param p\r
- * @return\r
- */\r
- public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {\r
- String[] ss = Split.splitTrim('|', p,4);\r
- if(ss[2]==null) {\r
- return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");\r
- }\r
- \r
- if(ss[3]==null) { // older 3 part encoding must be evaluated for NS\r
- ss[3] = ss[2];\r
- ss[2] = ss[1];\r
- Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);\r
- if(nss.notOK()) {\r
- return Result.err(nss);\r
- }\r
- ss[1] = nss.value.name;\r
- ss[0] = nss.value.ns;\r
- }\r
- return Result.ok(ss);\r
- }\r
-\r
- public static Data create(NsDAO.Data ns, String name) {\r
- NsSplit nss = new NsSplit(ns,name);\r
- Data rv = new Data();\r
- rv.ns = nss.ns;\r
- String[] s = nss.name.split("\\|");\r
- switch(s.length) {\r
- case 3:\r
- rv.type=s[0];\r
- rv.instance=s[1];\r
- rv.action=s[2];\r
- break;\r
- case 2:\r
- rv.type=s[0];\r
- rv.instance=s[1];\r
- rv.action=STAR;\r
- break;\r
- default:\r
- rv.type=s[0];\r
- rv.instance = STAR;\r
- rv.action = STAR;\r
- }\r
- return rv;\r
- }\r
- \r
- public static Data create(AuthzTrans trans, Question q, String name) {\r
- String[] s = name.split("\\|");\r
- Result<NsSplit> rdns = q.deriveNsSplit(trans, s[0]);\r
- Data rv = new PermDAO.Data();\r
- if(rdns.isOKhasData()) {\r
- switch(s.length) {\r
- case 3:\r
- rv.type=s[1];\r
- rv.instance=s[2];\r
- rv.action=s[3];\r
- break;\r
- case 2:\r
- rv.type=s[1];\r
- rv.instance=s[2];\r
- rv.action=STAR;\r
- break;\r
- default:\r
- rv.type=s[1];\r
- rv.instance = STAR;\r
- rv.action = STAR;\r
- }\r
- }\r
- return rv;\r
- }\r
- \r
- ////////////////////////////////////////\r
- // Getters\r
- public Set<String> roles(boolean mutable) {\r
- if (roles == null) {\r
- roles = new HashSet<String>();\r
- } else if (mutable && !(roles instanceof HashSet)) {\r
- roles = new HashSet<String>(roles);\r
- }\r
- return roles;\r
- }\r
-\r
- @Override\r
- public int[] invalidate(Cached<?,?> cache) {\r
- return new int[] {\r
- seg(cache,ns),\r
- seg(cache,ns,type),\r
- seg(cache,ns,type,STAR),\r
- seg(cache,ns,type,instance,action)\r
- };\r
- }\r
-\r
- @Override\r
- public ByteBuffer bytify() throws IOException {\r
- ByteArrayOutputStream baos = new ByteArrayOutputStream();\r
- PermLoader.deflt.marshal(this, new DataOutputStream(baos));\r
- return ByteBuffer.wrap(baos.toByteArray());\r
- }\r
- \r
- @Override\r
- public void reconstitute(ByteBuffer bb) throws IOException {\r
- PermLoader.deflt.unmarshal(this, toDIS(bb));\r
- }\r
-\r
- @Override\r
- public String toString() {\r
- return encode();\r
- }\r
- }\r
- \r
- private static class PermLoader extends Loader<Data> implements Streamer<Data> {\r
- public static final int MAGIC=283939453;\r
- public static final int VERSION=1;\r
- public static final int BUFF_SIZE=96;\r
-\r
- public static final PermLoader deflt = new PermLoader(KEYLIMIT);\r
- \r
- public PermLoader(int keylimit) {\r
- super(keylimit);\r
- }\r
- \r
- @Override\r
- public Data load(Data data, Row row) {\r
- // Int more efficient Match "fields" string\r
- data.ns = row.getString(0);\r
- data.type = row.getString(1);\r
- data.instance = row.getString(2);\r
- data.action = row.getString(3);\r
- data.roles = row.getSet(4,String.class);\r
- data.description = row.getString(5);\r
- return data;\r
- }\r
-\r
- @Override\r
- protected void key(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
- obj[idx]=data.ns;\r
- obj[++idx]=data.type;\r
- obj[++idx]=data.instance;\r
- obj[++idx]=data.action;\r
- }\r
-\r
- @Override\r
- protected void body(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
- obj[idx]=data.roles;\r
- obj[++idx]=data.description;\r
- }\r
-\r
- @Override\r
- public void marshal(Data data, DataOutputStream os) throws IOException {\r
- writeHeader(os,MAGIC,VERSION);\r
- writeString(os, data.ns);\r
- writeString(os, data.type);\r
- writeString(os, data.instance);\r
- writeString(os, data.action);\r
- writeStringSet(os, data.roles);\r
- writeString(os, data.description);\r
- }\r
-\r
- @Override\r
- public void unmarshal(Data data, DataInputStream is) throws IOException {\r
- /*int version = */readHeader(is,MAGIC,VERSION);\r
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields\r
- byte[] buff = new byte[BUFF_SIZE];\r
- data.ns = readString(is, buff);\r
- data.type = readString(is,buff);\r
- data.instance = readString(is,buff);\r
- data.action = readString(is,buff);\r
- data.roles = readStringSet(is,buff);\r
- data.description = readString(is,buff);\r
- }\r
- }\r
- \r
- private void init(AuthzTrans trans) {\r
- // the 3 is the number of key fields\r
- String[] helpers = setCRUD(trans, TABLE, Data.class, PermLoader.deflt);\r
- \r
- // Other SELECT style statements... match with a local Method\r
- psByType = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + \r
- " WHERE ns = ? AND type = ?", new PermLoader(2) {\r
- @Override\r
- protected void key(Data data, int idx, Object[] obj) {\r
- obj[idx]=data.type;\r
- }\r
- },readConsistency);\r
- \r
- psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +\r
- " WHERE ns = ?", new PermLoader(1),readConsistency);\r
- \r
- psChildren = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + \r
- " WHERE ns=? AND type > ? AND type < ?", \r
- new PermLoader(3) {\r
- @Override\r
- protected void key(Data data, int _idx, Object[] obj) {\r
- int idx = _idx;\r
- obj[idx] = data.ns;\r
- obj[++idx]=data.type + DOT;\r
- obj[++idx]=data.type + DOT_PLUS_ONE;\r
- }\r
- },readConsistency);\r
-\r
- }\r
-\r
-\r
- /**\r
- * Add a single Permission to the Role's Permission Collection\r
- * \r
- * @param trans\r
- * @param roleFullName\r
- * @param perm\r
- * @param type\r
- * @param action\r
- * @return\r
- */\r
- public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {\r
- // Note: Prepared Statements for Collection updates aren't supported\r
- //ResultSet rv =\r
- try {\r
- getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles + {'" + roleFullName + "'} " +\r
- "WHERE " +\r
- "ns = '" + perm.ns + "' AND " +\r
- "type = '" + perm.type + "' AND " +\r
- "instance = '" + perm.instance + "' AND " +\r
- "action = '" + perm.action + "';"\r
- );\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
-\r
- wasModified(trans, CRUD.update, perm, "Added role " + roleFullName + " to perm " +\r
- perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);\r
- return Result.ok();\r
- }\r
-\r
- /**\r
- * Remove a single Permission from the Role's Permission Collection\r
- * @param trans\r
- * @param roleFullName\r
- * @param perm\r
- * @param type\r
- * @param action\r
- * @return\r
- */\r
- public Result<Void> delRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {\r
- // Note: Prepared Statements for Collection updates aren't supported\r
- //ResultSet rv =\r
- try {\r
- getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles - {'" + roleFullName + "'} " +\r
- "WHERE " +\r
- "ns = '" + perm.ns + "' AND " +\r
- "type = '" + perm.type + "' AND " +\r
- "instance = '" + perm.instance + "' AND " +\r
- "action = '" + perm.action + "';"\r
- );\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
-\r
- //TODO how can we tell when it doesn't?\r
- wasModified(trans, CRUD.update, perm, "Removed role " + roleFullName + " from perm " +\r
- perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);\r
- return Result.ok();\r
- }\r
-\r
-\r
- \r
- /**\r
- * Additional method: \r
- * Select all Permissions by Name\r
- * \r
- * @param name\r
- * @return\r
- * @throws DAOException\r
- */\r
- public Result<List<Data>> readByType(AuthzTrans trans, String ns, String type) {\r
- return psByType.read(trans, R_TEXT, new Object[]{ns, type});\r
- }\r
- \r
- public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String type) {\r
- return psChildren.read(trans, R_TEXT, new Object[]{ns, type+DOT, type + DOT_PLUS_ONE});\r
- }\r
-\r
- public Result<List<Data>> readNS(AuthzTrans trans, String ns) {\r
- return psNS.read(trans, R_TEXT, new Object[]{ns});\r
- }\r
-\r
- /**\r
- * Add description to this permission\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param type\r
- * @param instance\r
- * @param action\r
- * @param description\r
- * @return\r
- */\r
- public Result<Void> addDescription(AuthzTrans trans, String ns, String type,\r
- String instance, String action, String description) {\r
- try {\r
- getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" \r
- + description + "' WHERE ns = '" + ns + "' AND type = '" + type + "'"\r
- + "AND instance = '" + instance + "' AND action = '" + action + "';");\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
-\r
- Data data = new Data();\r
- data.ns=ns;\r
- data.type=type;\r
- data.instance=instance;\r
- data.action=action;\r
- wasModified(trans, CRUD.update, data, "Added description " + description + " to permission " \r
- + data.encode(), null );\r
- return Result.ok();\r
- }\r
- \r
- /**\r
- * Log Modification statements to History\r
- */\r
- @Override\r
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {\r
- boolean memo = override.length>0 && override[0]!=null;\r
- boolean subject = override.length>1 && override[1]!=null;\r
-\r
- // Need to update history\r
- HistoryDAO.Data hd = HistoryDAO.newInitedData();\r
- hd.user = trans.user();\r
- hd.action = modified.name();\r
- hd.target = TABLE;\r
- hd.subject = subject ? override[1] : data.fullType();\r
- if (memo) {\r
- hd.memo = String.format("%s", override[0]);\r
- } else {\r
- hd.memo = String.format("%sd %s|%s|%s", modified.name(),data.fullType(),data.instance,data.action);\r
- }\r
- \r
- if(modified==CRUD.delete) {\r
- try {\r
- hd.reconstruct = data.bytify();\r
- } catch (IOException e) {\r
- trans.error().log(e,"Could not serialize PermDAO.Data");\r
- }\r
- }\r
- \r
- if(historyDAO.create(trans, hd).status!=Status.OK) {\r
- trans.error().log("Cannot log to History");\r
- }\r
- if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {\r
- trans.error().log("Cannot touch CacheInfo");\r
- }\r
- }\r
-}\r
-\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.ByteArrayOutputStream;\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Set;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.Cached;\r
-import org.onap.aaf.dao.CassAccess;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-import org.onap.aaf.dao.Streamer;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.util.Split;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Row;\r
-import com.datastax.driver.core.exceptions.DriverException;\r
-\r
-public class RoleDAO extends CassDAOImpl<AuthzTrans,RoleDAO.Data> {\r
-\r
- public static final String TABLE = "role";\r
- public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F\r
- \r
- private final HistoryDAO historyDAO;\r
- private final CacheInfoDAO infoDAO;\r
-\r
- private PSInfo psChildren, psNS, psName;\r
-\r
- public RoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {\r
- super(trans, RoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- // Set up sub-DAOs\r
- historyDAO = new HistoryDAO(trans, this);\r
- infoDAO = new CacheInfoDAO(trans,this);\r
- init(trans);\r
- }\r
-\r
- public RoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {\r
- super(trans, RoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));\r
- historyDAO = hDAO;\r
- infoDAO = ciDAO;\r
- init(trans);\r
- }\r
-\r
-\r
	//////////////////////////////////////////
	// Data Definition, matches Cassandra DM
	//////////////////////////////////////////
	// Number of columns that make up the primary key of the "role" table: (ns, name).
	private static final int KEYLIMIT = 2;
	/**
	 * Data class that matches the Cassandra Table "role".
	 *
	 * Holds one row: the owning namespace, the role name within that namespace,
	 * the set of encoded permission strings granted to the role, and a free-text
	 * description. Implements Bytification so rows can be serialized into the
	 * History table for later reconstruction.
	 */
	public static class Data extends CacheableData implements Bytification {
		public String ns;               // namespace the role belongs to
		public String name;             // role name within the namespace
		public Set<String> perms;       // encoded permission strings (see PermDAO.Data.encode)
		public String description;      // human-readable description, may be null

		////////////////////////////////////////
		// Getters
		/**
		 * Return the permission set, never null.
		 *
		 * @param mutable when true, guarantees the returned Set is a HashSet the
		 *                caller may modify (copying an immutable set if needed);
		 *                when false, the existing set is returned as-is.
		 */
		public Set<String> perms(boolean mutable) {
			if (perms == null) {
				perms = new HashSet<String>();
			} else if (mutable && !(perms instanceof HashSet)) {
				// Defensive copy: e.g. driver-returned sets may be unmodifiable.
				perms = new HashSet<String>(perms);
			}
			return perms;
		}
		
		/**
		 * Create a Data keyed by splitting the given name against the Namespace.
		 *
		 * @param ns   namespace row used to derive the ns/name split
		 * @param name possibly fully-qualified role name
		 */
		public static Data create(NsDAO.Data ns, String name) {
			NsSplit nss = new NsSplit(ns,name); 
			RoleDAO.Data rv = new Data();
			rv.ns = nss.ns;
			rv.name=nss.name;
			return rv;
		}
		
		/** @return dot-separated full role name, i.e. "ns.name". */
		public String fullName() {
			return ns + '.' + name;
		}
		
		/** @return pipe-separated encoding "ns|name", the storable wire form. */
		public String encode() {
			return ns + '|' + name;
		}
		
		/**
		 * Decode a Role String, including breaking into appropriate Namespace.
		 *
		 * @param trans transaction used for namespace derivation
		 * @param q     Question used to derive the ns split for legacy encodings
		 * @param r     encoded role, either "ns|name" or a legacy single value
		 * @return Result with the decoded Data, or an error from ns derivation
		 */
		public static Result<Data> decode(AuthzTrans trans, Question q, String r) {
			String[] ss = Split.splitTrim('|', r,2);
			Data data = new Data();
			if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
				if(nss.notOK()) {
					return Result.err(nss);
				}
				data.ns=nss.value.ns;
				data.name=nss.value.name;
			} else { // newer 2 part "ns|name" encoding
				data.ns=ss[0];
				data.name=ss[1];
			}
			return Result.ok(data);
		}

		/**
		 * Build Role Data from UserRole Data (which carries ns and rname directly).
		 * @param urdd user-role row to take the role key from
		 * @return new Data keyed by urdd's ns/rname
		 */
		public static RoleDAO.Data decode(UserRoleDAO.Data urdd) {
			RoleDAO.Data rd = new RoleDAO.Data();
			rd.ns = urdd.ns;
			rd.name = urdd.rname;
			return rd;
		}


		/**
		 * Decode a Role String into its components without building a Data object.
		 *
		 * @param trans transaction used for namespace derivation
		 * @param q     Question used to derive the ns split for legacy encodings
		 * @param p     encoded role, either "ns|name" or a legacy single value
		 * @return Result holding {ns, name}, or an error from ns derivation
		 */
		public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {
			String[] ss = Split.splitTrim('|', p,2);
			if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
				if(nss.notOK()) {
					return Result.err(nss);
				}
				ss[0] = nss.value.ns;
				ss[1] = nss.value.name;
			}
			return Result.ok(ss);
		}
		
		// Cache segments touched by a change to this row: the (ns,name) pair
		// plus the ns-only and name-only lookup segments.
		@Override
		public int[] invalidate(Cached<?,?> cache) {
			return new int[] {
				seg(cache,ns,name),
				seg(cache,ns),
				seg(cache,name),
			};
		}

		// Serialize this row via RoleLoader for storage (e.g. History reconstruct).
		@Override
		public ByteBuffer bytify() throws IOException {
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			RoleLoader.deflt.marshal(this,new DataOutputStream(baos));
			return ByteBuffer.wrap(baos.toByteArray());
		}
		
		// Inverse of bytify(): repopulate fields from a serialized buffer.
		@Override
		public void reconstitute(ByteBuffer bb) throws IOException {
			RoleLoader.deflt.unmarshal(this, toDIS(bb));
		}

		@Override
		public String toString() {
			return ns + '.' + name;
		}
	}
-\r
	/**
	 * Loader/Streamer for Role rows.
	 *
	 * Maps Cassandra Row columns to Data fields (load), fills positional
	 * key/body argument arrays for prepared statements (key/body), and
	 * reads/writes the compact binary form used by bytify/reconstitute
	 * (marshal/unmarshal). Column and array positions are contract: they must
	 * match the table definition and each other.
	 */
	private static class RoleLoader extends Loader<Data> implements Streamer<Data> {
		public static final int MAGIC=923577343;    // stream sanity marker
		public static final int VERSION=1;          // stream format version
		public static final int BUFF_SIZE=96;       // scratch buffer for readString

		// Shared instance using the full primary key (ns, name).
		public static final RoleLoader deflt = new RoleLoader(KEYLIMIT);
		
		public RoleLoader(int keylimit) {
			super(keylimit);
		}
		
		// Column order must match the SELECT field list: ns, name, perms, description.
		@Override
		public Data load(Data data, Row row) {
			// Int more efficient
			data.ns = row.getString(0);
			data.name = row.getString(1);
			data.perms = row.getSet(2,String.class);
			data.description = row.getString(3);
			return data;
		}

		// Fill key columns (ns, name) starting at _idx.
		@Override
		protected void key(Data data, int _idx, Object[] obj) {
			int idx = _idx;
			obj[idx]=data.ns;
			obj[++idx]=data.name;
		}

		// Fill non-key columns (perms, description) starting at _idx.
		@Override
		protected void body(Data data, int _idx, Object[] obj) {
			int idx = _idx;
			obj[idx]=data.perms;
			obj[++idx]=data.description;
		}

		// Binary write; field order must mirror unmarshal exactly.
		@Override
		public void marshal(Data data, DataOutputStream os) throws IOException {
			writeHeader(os,MAGIC,VERSION);
			writeString(os, data.ns);
			writeString(os, data.name);
			writeStringSet(os,data.perms);
			writeString(os, data.description);
		}

		// Binary read; field order must mirror marshal exactly.
		@Override
		public void unmarshal(Data data, DataInputStream is) throws IOException {
			/*int version = */readHeader(is,MAGIC,VERSION);
			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
			byte[] buff = new byte[BUFF_SIZE];
			data.ns = readString(is, buff);
			data.name = readString(is,buff);
			data.perms = readStringSet(is,buff);
			data.description = readString(is,buff);
		}
	};
-\r
	/**
	 * Set up CRUD statements plus the secondary read paths:
	 * by-namespace, by-name, and the "children of a role" range query.
	 * Called from every constructor.
	 */
	private void init(AuthzTrans trans) {
		// setCRUD registers standard Create/Read/Update/Delete statements and
		// returns helper strings (e.g. the comma-separated field list).
		String[] helpers = setCRUD(trans, TABLE, Data.class, RoleLoader.deflt);
		
		// Lookup all roles in a namespace; single-column key (ns).
		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
				" WHERE ns = ?", new RoleLoader(1),readConsistency);

		// Lookup roles by name across namespaces; single-column key (name).
		psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
				" WHERE name = ?", new RoleLoader(1),readConsistency);

		// Range query for children of "ns.name": names strictly between
		// "name." and "name.<next char>" (see DOT / DOT_PLUS_ONE constants).
		psChildren = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + 
				" WHERE ns=? AND name > ? AND name < ?", 
				new RoleLoader(3) {
					// Derive the three bind values from a single Data's ns/name.
					@Override
					protected void key(Data data, int _idx, Object[] obj) {
						int idx = _idx;
						obj[idx] = data.ns;
						obj[++idx]=data.name + DOT;
						obj[++idx]=data.name + DOT_PLUS_ONE;
					}
				},readConsistency);
		
	}
-\r
- public Result<List<Data>> readNS(AuthzTrans trans, String ns) {\r
- return psNS.read(trans, R_TEXT + " NS " + ns, new Object[]{ns});\r
- }\r
-\r
- public Result<List<Data>> readName(AuthzTrans trans, String name) {\r
- return psName.read(trans, R_TEXT + name, new Object[]{name});\r
- }\r
-\r
- public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String role) {\r
- if(role.length()==0 || "*".equals(role)) {\r
- return psChildren.read(trans, R_TEXT, new Object[]{ns, FIRST_CHAR, LAST_CHAR}); \r
- } else {\r
- return psChildren.read(trans, R_TEXT, new Object[]{ns, role+DOT, role+DOT_PLUS_ONE});\r
- }\r
- }\r
-\r
- /**\r
- * Add a single Permission to the Role's Permission Collection\r
- * \r
- * @param trans\r
- * @param role\r
- * @param perm\r
- * @param type\r
- * @param action\r
- * @return\r
- */\r
- public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {\r
- // Note: Prepared Statements for Collection updates aren't supported\r
- String pencode = perm.encode();\r
- try {\r
- getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms + {'" + \r
- pencode + "'} WHERE " +\r
- "ns = '" + role.ns + "' AND name = '" + role.name + "';");\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
-\r
- wasModified(trans, CRUD.update, role, "Added permission " + pencode + " to role " + role.fullName());\r
- return Result.ok();\r
- }\r
-\r
- /**\r
- * Remove a single Permission from the Role's Permission Collection\r
- * @param trans\r
- * @param role\r
- * @param perm\r
- * @param type\r
- * @param action\r
- * @return\r
- */\r
- public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {\r
- // Note: Prepared Statements for Collection updates aren't supported\r
-\r
- String pencode = perm.encode();\r
- \r
- //ResultSet rv =\r
- try {\r
- getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms - {'" + \r
- pencode + "'} WHERE " +\r
- "ns = '" + role.ns + "' AND name = '" + role.name + "';");\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
-\r
- //TODO how can we tell when it doesn't?\r
- wasModified(trans, CRUD.update, role, "Removed permission " + pencode + " from role " + role.fullName() );\r
- return Result.ok();\r
- }\r
- \r
- /**\r
- * Add description to role\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param name\r
- * @param description\r
- * @return\r
- */\r
- public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {\r
- try {\r
- getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" \r
- + description + "' WHERE ns = '" + ns + "' AND name = '" + name + "';");\r
- } catch (DriverException | APIException | IOException e) {\r
- reportPerhapsReset(trans,e);\r
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);\r
- }\r
-\r
- Data data = new Data();\r
- data.ns=ns;\r
- data.name=name;\r
- wasModified(trans, CRUD.update, data, "Added description " + description + " to role " + data.fullName(), null );\r
- return Result.ok();\r
- }\r
- \r
- \r
	/**
	 * Log Modification statements to History and invalidate cache segments.
	 *
	 * @param trans    current transaction (supplies the acting user)
	 * @param modified which CRUD action was done
	 * @param data     entity data that needs a log entry
	 * @param override optional overrides: override[0] replaces the crafted memo,
	 *                 override[1] replaces the default subject (data.fullName())
	 */
	@Override
	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
		boolean memo = override.length>0 && override[0]!=null;
		boolean subject = override.length>1 && override[1]!=null;

		HistoryDAO.Data hd = HistoryDAO.newInitedData();
		hd.user = trans.user();
		hd.action = modified.name();
		hd.target = TABLE;
		hd.subject = subject ? override[1] : data.fullName();
		hd.memo = memo ? override[0] : (data.fullName() + " was " + modified.name() + 'd' );
		if(modified==CRUD.delete) {
			// Keep a serialized copy so a deleted role can be reconstructed later.
			try {
				hd.reconstruct = data.bytify();
			} catch (IOException e) {
				trans.error().log(e,"Could not serialize RoleDAO.Data");
			}
		}

		// Failures below are logged, not propagated: history/cache upkeep must
		// not fail the primary operation.
		if(historyDAO.create(trans, hd).status!=Status.OK) {
			trans.error().log("Cannot log to History");
		}
		if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
			trans.error().log("Cannot touch CacheInfo for Role");
		}
	}
-\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import org.onap.aaf.authz.layer.Result;\r
-\r
-\r
-\r
-\r
-/**\r
- * Add additional Behavior for Specific Applications for Results\r
- * \r
- * In this case, we add additional BitField information accessible by\r
- * method (\r
- *\r
- * @param <RV>\r
- */\r
-public class Status<RV> extends Result<RV> {\r
- \r
- // 10/1/2013: Initially, I used enum, but it's not extensible.\r
- public final static int ERR_NsNotFound = Result.ERR_General+1,\r
- ERR_RoleNotFound = Result.ERR_General+2,\r
- ERR_PermissionNotFound = Result.ERR_General+3, \r
- ERR_UserNotFound = Result.ERR_General+4,\r
- ERR_UserRoleNotFound = Result.ERR_General+5,\r
- ERR_DelegateNotFound = Result.ERR_General+6,\r
- ERR_InvalidDelegate = Result.ERR_General+7,\r
- ERR_DependencyExists = Result.ERR_General+8,\r
- ERR_NoApprovals = Result.ERR_General+9,\r
- ACC_Now = Result.ERR_General+10,\r
- ACC_Future = Result.ERR_General+11,\r
- ERR_ChoiceNeeded = Result.ERR_General+12,\r
- ERR_FutureNotRequested = Result.ERR_General+13;\r
- \r
- /**\r
- * Constructor for Result set. \r
- * @param data\r
- * @param status\r
- */\r
- private Status(RV value, int status, String details, String[] variables ) {\r
- super(value,status,details,variables);\r
- }\r
-\r
- public static String name(int status) {\r
- switch(status) {\r
- case OK: return "OK";\r
- case ERR_NsNotFound: return "ERR_NsNotFound";\r
- case ERR_RoleNotFound: return "ERR_RoleNotFound";\r
- case ERR_PermissionNotFound: return "ERR_PermissionNotFound"; \r
- case ERR_UserNotFound: return "ERR_UserNotFound";\r
- case ERR_UserRoleNotFound: return "ERR_UserRoleNotFound";\r
- case ERR_DelegateNotFound: return "ERR_DelegateNotFound";\r
- case ERR_InvalidDelegate: return "ERR_InvalidDelegate";\r
- case ERR_ConflictAlreadyExists: return "ERR_ConflictAlreadyExists";\r
- case ERR_DependencyExists: return "ERR_DependencyExists";\r
- case ERR_ActionNotCompleted: return "ERR_ActionNotCompleted";\r
- case ERR_Denied: return "ERR_Denied";\r
- case ERR_Policy: return "ERR_Policy";\r
- case ERR_BadData: return "ERR_BadData";\r
- case ERR_NotImplemented: return "ERR_NotImplemented";\r
- case ERR_NotFound: return "ERR_NotFound";\r
- case ERR_ChoiceNeeded: return "ERR_ChoiceNeeded";\r
- }\r
- //case ERR_General: or unknown... \r
- return "ERR_General";\r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.cass;\r
-\r
-import java.io.ByteArrayOutputStream;\r
-import java.io.DataInputStream;\r
-import java.io.DataOutputStream;\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.Date;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.Cached;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.onap.aaf.dao.Loader;\r
-import org.onap.aaf.dao.Streamer;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Slot;\r
-import org.onap.aaf.inno.env.util.Chrono;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Row;\r
-\r
/**
 * DAO for the Cassandra "user_role" table, mapping users to roles with an
 * expiration date. Provides lookups by user, by role, and by (user, role),
 * and logs every modification to both the user_role and role History streams.
 */
public class UserRoleDAO extends CassDAOImpl<AuthzTrans,UserRoleDAO.Data> {
	public static final String TABLE = "user_role";
	
	public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F

	// Transaction slot name used to stash UserRole info on the AuthzTrans.
	private static final String TRANS_UR_SLOT = "_TRANS_UR_SLOT_";
	public Slot transURSlot;
	
	private final HistoryDAO historyDAO;    // change-log sink
	private final CacheInfoDAO infoDAO;     // cache invalidation sink
	
	// Secondary read paths; see init(trans).
	private PSInfo psByUser, psByRole, psUserInRole;



	/**
	 * Construct a standalone UserRoleDAO, creating its own sub-DAOs.
	 * @param trans    authorization transaction used during setup
	 * @param cluster  Cassandra cluster to connect to
	 * @param keyspace keyspace containing the user_role table
	 */
	public UserRoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
		super(trans, UserRoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
		transURSlot = trans.slot(TRANS_UR_SLOT);
		init(trans);

		// Set up sub-DAOs
		historyDAO = new HistoryDAO(trans, this);
		infoDAO = new CacheInfoDAO(trans,this);
	}

	/**
	 * Construct a UserRoleDAO that shares already-created sub-DAOs.
	 * @param trans authorization transaction used during setup
	 * @param hDAO  existing History DAO that change records are written to
	 * @param ciDAO existing CacheInfo DAO used to invalidate cache segments
	 */
	public UserRoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
		super(trans, UserRoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
		transURSlot = trans.slot(TRANS_UR_SLOT);
		historyDAO = hDAO;
		infoDAO = ciDAO;
		init(trans);
	}

	// Number of columns forming the primary key: (user, role).
	private static final int KEYLIMIT = 2;
	/**
	 * Data class that matches the Cassandra Table "user_role".
	 * role is the full "ns.rname" string; ns and rname are its split parts.
	 */
	public static class Data extends CacheableData implements Bytification {
		public String user;     // user id
		public String role;     // full role name, "ns.rname"
		public String ns;       // namespace portion of role
		public String rname;    // role-name portion of role
		public Date expires;    // when this membership lapses
		
		// Cache segments touched by a change to this row.
		@Override
		public int[] invalidate(Cached<?,?> cache) {
			// Note: I'm not worried about Name collisions, because the formats are different:
			// myName ... etc versus
			// com. ...
			// The "dot" makes the difference.
			return new int[] {
				seg(cache,user,role),
				seg(cache,user),
				seg(cache,role)
			};
		}

		// Serialize this row via URLoader for storage (e.g. History reconstruct).
		@Override
		public ByteBuffer bytify() throws IOException {
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			URLoader.deflt.marshal(this,new DataOutputStream(baos));
			return ByteBuffer.wrap(baos.toByteArray());
		}
		
		// Inverse of bytify(): repopulate fields from a serialized buffer.
		@Override
		public void reconstitute(ByteBuffer bb) throws IOException {
			URLoader.deflt.unmarshal(this, toDIS(bb));
		}

		/** Set role fields from an already-split ns and rname. */
		public void role(String ns, String rname) {
			this.ns = ns;
			this.rname = rname;
			this.role = ns + '.' + rname;
		}
		
		/** Set role fields from a Role row. */
		public void role(RoleDAO.Data rdd) {
			ns = rdd.ns;
			rname = rdd.name;
			role = rdd.fullName();
		}

		/**
		 * Set role fields from a full role string, deriving the ns split.
		 * @return true if the split succeeded (ns/rname are set), else false
		 */
		public boolean role(AuthzTrans trans, Question ques, String role) {
			this.role = role;
			Result<NsSplit> rnss = ques.deriveNsSplit(trans, role);
			if(rnss.isOKhasData()) {
				ns = rnss.value.ns;
				rname = rnss.value.name;
				return true;
			} else {
				return false;
			}
		}

		@Override
		public String toString() {
			return user + '|' + ns + '|' + rname + '|' + Chrono.dateStamp(expires);
		}


	}
	
	/**
	 * Loader/Streamer for UserRole rows. Column and array positions are
	 * contract: they must match the table definition and each other.
	 */
	private static class URLoader extends Loader<Data> implements Streamer<Data> {
		public static final int MAGIC=738469903;    // stream sanity marker
		public static final int VERSION=1;          // stream format version
		public static final int BUFF_SIZE=48;       // scratch buffer for readString
		
		// Shared instance using the full primary key (user, role).
		public static final URLoader deflt = new URLoader(KEYLIMIT);

		public URLoader(int keylimit) {
			super(keylimit);
		}

		// Column order must match the SELECT field list.
		@Override
		public Data load(Data data, Row row) {
			data.user = row.getString(0);
			data.role = row.getString(1);
			data.ns = row.getString(2);
			data.rname = row.getString(3);
			data.expires = row.getDate(4);
			return data;
		}

		// Fill key columns (user, role) starting at _idx.
		@Override
		protected void key(Data data, int _idx, Object[] obj) {
			int idx = _idx;
			obj[idx]=data.user;
			obj[++idx]=data.role;
		}

		// Fill non-key columns (ns, rname, expires) starting at _idx.
		@Override
		protected void body(Data data, int _idx, Object[] obj) {
			int idx = _idx;
			obj[idx]=data.ns;
			obj[++idx]=data.rname;
			obj[++idx]=data.expires;
		}
		
		// Binary write; field order must mirror unmarshal exactly.
		// A null expires date is encoded as -1.
		@Override
		public void marshal(Data data, DataOutputStream os) throws IOException {
			writeHeader(os,MAGIC,VERSION);

			writeString(os, data.user);
			writeString(os, data.role);
			writeString(os, data.ns);
			writeString(os, data.rname);
			os.writeLong(data.expires==null?-1:data.expires.getTime());
		}

		// Binary read; field order must mirror marshal exactly.
		@Override
		public void unmarshal(Data data, DataInputStream is) throws IOException {
			/*int version = */readHeader(is,MAGIC,VERSION);
			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
			
			byte[] buff = new byte[BUFF_SIZE];
			data.user = readString(is,buff);
			data.role = readString(is,buff);
			data.ns = readString(is,buff);
			data.rname = readString(is,buff);
			long l = is.readLong();
			data.expires = l<0?null:new Date(l);
		}

	};
	
	/**
	 * Set up CRUD statements plus the secondary read paths:
	 * by-user, by-role (ALLOW FILTERING), and exact (user, role).
	 * Called from every constructor.
	 */
	private void init(AuthzTrans trans) {
		String[] helper = setCRUD(trans, TABLE, Data.class, URLoader.deflt);
		
		psByUser = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ?", 
			new URLoader(1) {
				@Override
				protected void key(Data data, int idx, Object[] obj) {
					obj[idx]=data.user;
				}
			},readConsistency);
		
		// Note: We understand this call may have poor performance, so only should be used in Management (Delete) func
		psByRole = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE role = ? ALLOW FILTERING", 
			new URLoader(1) {
				@Override
				protected void key(Data data, int idx, Object[] obj) {
					obj[idx]=data.role;
				}
			},readConsistency);
		
		psUserInRole = new PSInfo(trans,SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ? AND role = ?",
			URLoader.deflt,readConsistency);
	}

	/** Read all role memberships for the given user. */
	public Result<List<Data>> readByUser(AuthzTrans trans, String user) {
		return psByUser.read(trans, R_TEXT + " by User " + user, new Object[]{user});
	}

	/**
	 * Note: Use Sparingly. Cassandra's forced key structure means this will perform fairly poorly
	 * @param trans transaction for logging/metrics
	 * @param role  full role name to look up members of
	 * @return Result wrapping the matching rows, or an error
	 * @throws DAOException
	 */
	public Result<List<Data>> readByRole(AuthzTrans trans, String role) {
		return psByRole.read(trans, R_TEXT + " by Role " + role, new Object[]{role});
	}
	
	/**
	 * Direct Lookup of User Role
	 * Don't forget to check for Expiration
	 */
	public Result<List<Data>> readByUserRole(AuthzTrans trans, String user, String role) {
		return psUserInRole.read(trans, R_TEXT + " by User " + user + " and Role " + role, new Object[]{user,role});
	}


	/**
	 * Log Modification statements to History — one entry against user_role and
	 * a second (always an "update") against the role table — then invalidate
	 * cache segments.
	 * @param trans    current transaction (supplies the acting user)
	 * @param modified which CRUD action was done
	 * @param data     entity data that needs a log entry
	 * @param override optional overrides: override[0] replaces the crafted memo,
	 *                 override[1] replaces the default subject (user|role)
	 */
	@Override
	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
		boolean memo = override.length>0 && override[0]!=null;
		boolean subject = override.length>1 && override[1]!=null;

		HistoryDAO.Data hd = HistoryDAO.newInitedData();
		HistoryDAO.Data hdRole = HistoryDAO.newInitedData();
		
		hd.user = hdRole.user = trans.user();
		hd.action = modified.name();
		// Modifying User/Role is an Update to Role, not a Create. JG, 07-14-2015
		hdRole.action = CRUD.update.name();
		hd.target = TABLE;
		hdRole.target = RoleDAO.TABLE;
		hd.subject = subject?override[1] : (data.user + '|'+data.role);
		hdRole.subject = data.role;
		// Craft per-action memos unless the caller supplied one.
		switch(modified) {
			case create: 
				hd.memo = hdRole.memo = memo
					? String.format("%s by %s", override[0], hd.user)
					: String.format("%s added to %s",data.user,data.role); 
				break;
			case update: 
				hd.memo = hdRole.memo = memo
					? String.format("%s by %s", override[0], hd.user)
					: String.format("%s - %s was updated",data.user,data.role);
				break;
			case delete: 
				hd.memo = hdRole.memo = memo
					? String.format("%s by %s", override[0], hd.user)
					: String.format("%s removed from %s",data.user,data.role);
				// Keep a serialized copy so a deleted membership can be reconstructed.
				try {
					hd.reconstruct = hdRole.reconstruct = data.bytify();
				} catch (IOException e) {
					trans.warn().log(e,"Deleted UserRole could not be serialized");
				}
				break;
			default:
				hd.memo = hdRole.memo = memo
					? String.format("%s by %s", override[0], hd.user)
					: "n/a";
		}

		// Failures below are logged, not propagated: history/cache upkeep must
		// not fail the primary operation.
		if(historyDAO.create(trans, hd).status!=Status.OK) {
			trans.error().log("Cannot log to History");
		}
		
		if(historyDAO.create(trans, hdRole).status!=Status.OK) {
			trans.error().log("Cannot log to History");
		}
		// uses User as Segment
		if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
			trans.error().log("Cannot touch CacheInfo");
		}
	}
}
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.hl;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.org.Executor;\r
-import org.onap.aaf.dao.aaf.cass.NsSplit;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO.Data;\r
-\r
-public class CassExecutor implements Executor {\r
-\r
- private Question q;\r
- private Function f;\r
- private AuthzTrans trans;\r
-\r
- public CassExecutor(AuthzTrans trans, Function f) {\r
- this.trans = trans;\r
- this.f = f;\r
- this.q = this.f.q;\r
- }\r
-\r
- @Override\r
- public boolean hasPermission(String user, String ns, String type, String instance, String action) {\r
- return isGranted(user, ns, type, instance, action);\r
- }\r
-\r
- @Override\r
- public boolean inRole(String name) {\r
- Result<NsSplit> nss = q.deriveNsSplit(trans, name);\r
- if(nss.notOK())return false;\r
- return q.roleDAO.read(trans, nss.value.ns,nss.value.name).isOKhasData();\r
- }\r
-\r
- public boolean isGranted(String user, String ns, String type, String instance, String action) {\r
- return q.isGranted(trans, user, ns, type, instance,action);\r
- }\r
-\r
- @Override\r
- public String namespace() throws Exception {\r
- Result<Data> res = q.validNSOfDomain(trans,trans.user());\r
- if(res.isOK()) {\r
- String user[] = trans.user().split("\\.");\r
- return user[user.length-1] + '.' + user[user.length-2];\r
- }\r
- throw new Exception(res.status + ' ' + res.details);\r
- }\r
-\r
- @Override\r
- public String id() {\r
- return trans.user();\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.hl;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-\r
-import java.io.IOException;\r
-import java.util.ArrayList;\r
-import java.util.Date;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Set;\r
-import java.util.UUID;\r
-\r
-import org.onap.aaf.authz.common.Define;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.org.Executor;\r
-import org.onap.aaf.authz.org.Organization;\r
-import org.onap.aaf.authz.org.Organization.Expiration;\r
-import org.onap.aaf.authz.org.Organization.Identity;\r
-import org.onap.aaf.authz.org.Organization.Policy;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.onap.aaf.dao.aaf.cass.ApprovalDAO;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.DelegateDAO;\r
-import org.onap.aaf.dao.aaf.cass.FutureDAO;\r
-import org.onap.aaf.dao.aaf.cass.Namespace;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsSplit;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO.Data;\r
-import org.onap.aaf.dao.aaf.hl.Question.Access;\r
-\r
-public class Function {\r
-\r
	// Target-object identifiers used in function-operation permission strings.
	public static final String FOP_CRED = "cred";
	public static final String FOP_DELEGATE = "delegate";
	public static final String FOP_NS = "ns";
	public static final String FOP_PERM = "perm";
	public static final String FOP_ROLE = "role";
	public static final String FOP_USER_ROLE = "user_role";
	// First Action should ALWAYS be "write", see "CreateRole"
	public final Question q;

	/**
	 * @param trans    transaction; not used here — kept for signature parity
	 *                 with the other high-level constructors (TODO confirm)
	 * @param question Question layer this Function delegates queries to
	 */
	public Function(AuthzTrans trans, Question question) {
		q = question;
	}
-\r
- private class ErrBuilder {\r
- private StringBuilder sb;\r
- private List<String> ao;\r
-\r
- public void log(Result<?> result) {\r
- if (result.notOK()) {\r
- if (sb == null) {\r
- sb = new StringBuilder();\r
- ao = new ArrayList<String>();\r
- }\r
- sb.append(result.details);\r
- sb.append('\n');\r
- for (String s : result.variables) {\r
- ao.add(s);\r
- }\r
- }\r
- }\r
-\r
- public String[] vars() {\r
- String[] rv = new String[ao.size()];\r
- ao.toArray(rv);\r
- return rv;\r
- }\r
-\r
- public boolean hasErr() {\r
- return sb != null;\r
- }\r
-\r
- @Override\r
- public String toString() {\r
- return sb == null ? "" : String.format(sb.toString(), ao);\r
- }\r
- }\r
-\r
- /**\r
- * createNS\r
- * \r
- * Create Namespace\r
- * \r
- * @param trans\r
- * @param org\r
- * @param ns\r
- * @param user\r
- * @return\r
- * @throws DAOException\r
- * \r
- * To create an NS, you need to: 1) validate permission to\r
- * modify parent NS 2) Does NS exist already? 3) Create NS with\r
- * a) "user" as owner. NOTE: Per 10-15 request for AAF 1.0 4)\r
- * Loop through Roles with Parent NS, and map any that start\r
- * with this NS into this one 5) Loop through Perms with Parent\r
- * NS, and map any that start with this NS into this one\r
- */\r
- public Result<Void> createNS(AuthzTrans trans, Namespace namespace, boolean fromApproval) {\r
- Result<?> rq;\r
-\r
- if (namespace.name.endsWith(Question.DOT_ADMIN)\r
- || namespace.name.endsWith(Question.DOT_OWNER)) {\r
- return Result.err(Status.ERR_BadData,\r
- "'admin' and 'owner' are reserved names in AAF");\r
- }\r
-\r
- try {\r
- for (String u : namespace.owner) {\r
- Organization org = trans.org();\r
- Identity orgUser = org.getIdentity(trans, u);\r
- if (orgUser == null || !orgUser.isResponsible()) {\r
- // check if user has explicit permission\r
- String reason;\r
- if (org.isTestEnv() && (reason=org.validate(trans, Policy.AS_EMPLOYEE,\r
- new CassExecutor(trans, this), u))!=null) {\r
- return Result.err(Status.ERR_Policy,reason);\r
- }\r
- }\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,\r
- "Could not contact Organization for User Validation");\r
- }\r
-\r
- String user = trans.user();\r
- // 1) May Change Parent?\r
- int idx = namespace.name.lastIndexOf('.');\r
- String parent;\r
- if (idx < 0) {\r
- if (!q.isGranted(trans, user, Define.ROOT_NS,Question.NS, ".", "create")) {\r
- return Result.err(Result.ERR_Security,\r
- "%s may not create Root Namespaces", user);\r
- }\r
- parent = null;\r
- fromApproval = true;\r
- } else {\r
- parent = namespace.name.substring(0, idx);\r
- }\r
-\r
- if (!fromApproval) {\r
- Result<NsDAO.Data> rparent = q.deriveNs(trans, parent);\r
- if (rparent.notOK()) {\r
- return Result.err(rparent);\r
- }\r
- rparent = q.mayUser(trans, user, rparent.value, Access.write);\r
- if (rparent.notOK()) {\r
- return Result.err(rparent);\r
- }\r
- }\r
-\r
- // 2) Does requested NS exist\r
- if (q.nsDAO.read(trans, namespace.name).isOKhasData()) {\r
- return Result.err(Status.ERR_ConflictAlreadyExists,\r
- "Target Namespace already exists");\r
- }\r
-\r
- // Someone must be responsible.\r
- if (namespace.owner == null || namespace.owner.isEmpty()) {\r
- return Result\r
- .err(Status.ERR_Policy,\r
- "Namespaces must be assigned at least one responsible party");\r
- }\r
-\r
- // 3) Create NS\r
- Date now = new Date();\r
-\r
- Result<Void> r;\r
- // 3a) Admin\r
-\r
- try {\r
- // Originally, added the enterer as Admin, but that's not necessary,\r
- // or helpful for Operations folks..\r
- // Admins can be empty, because they can be changed by lower level\r
- // NSs\r
- // if(ns.admin(false).isEmpty()) {\r
- // ns.admin(true).add(user);\r
- // }\r
- if (namespace.admin != null) {\r
- for (String u : namespace.admin) {\r
- if ((r = checkValidID(trans, now, u)).notOK()) {\r
- return r;\r
- }\r
- }\r
- }\r
-\r
- // 3b) Responsible\r
- Organization org = trans.org();\r
- for (String u : namespace.owner) {\r
- Identity orgUser = org.getIdentity(trans, u);\r
- if (orgUser == null) {\r
- return Result\r
- .err(Status.ERR_BadData,\r
- "NS must be created with an %s approved Responsible Party",\r
- org.getName());\r
- }\r
- }\r
- } catch (Exception e) {\r
- return Result.err(Status.ERR_UserNotFound, e.getMessage());\r
- }\r
-\r
- // VALIDATIONS done... Add NS\r
- if ((rq = q.nsDAO.create(trans, namespace.data())).notOK()) {\r
- return Result.err(rq);\r
- }\r
-\r
- // Since Namespace is now created, we need to grab all subsequent errors\r
- ErrBuilder eb = new ErrBuilder();\r
-\r
- // Add UserRole(s)\r
- UserRoleDAO.Data urdd = new UserRoleDAO.Data();\r
- urdd.expires = trans.org().expiration(null, Expiration.UserInRole).getTime();\r
- urdd.role(namespace.name, Question.ADMIN);\r
- for (String admin : namespace.admin) {\r
- urdd.user = admin;\r
- eb.log(q.userRoleDAO.create(trans, urdd));\r
- }\r
- urdd.role(namespace.name,Question.OWNER);\r
- for (String owner : namespace.owner) {\r
- urdd.user = owner;\r
- eb.log(q.userRoleDAO.create(trans, urdd));\r
- }\r
-\r
- addNSAdminRolesPerms(trans, eb, namespace.name);\r
-\r
- addNSOwnerRolesPerms(trans, eb, namespace.name);\r
-\r
- if (parent != null) {\r
- // Build up with any errors\r
-\r
- Result<NsDAO.Data> parentNS = q.deriveNs(trans, parent);\r
- String targetNs = parentNS.value.name; // Get the Parent Namespace,\r
- // not target\r
- String targetName = namespace.name.substring(parentNS.value.name.length() + 1); // Remove the Parent Namespace from the\r
- // Target + a dot, and you'll get the name\r
- int targetNameDot = targetName.length() + 1;\r
-\r
- // 4) Change any roles with children matching this NS, and\r
- Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readChildren(trans, targetNs, targetName);\r
- if (rrdc.isOKhasData()) {\r
- for (RoleDAO.Data rdd : rrdc.value) {\r
- // Remove old Role from Perms, save them off\r
- List<PermDAO.Data> lpdd = new ArrayList<PermDAO.Data>();\r
- for(String p : rdd.perms(false)) {\r
- Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);\r
- if(rpdd.isOKhasData()) {\r
- PermDAO.Data pdd = rpdd.value;\r
- lpdd.add(pdd);\r
- q.permDAO.delRole(trans, pdd, rdd);\r
- } else{\r
- trans.error().log(rpdd.errorString());\r
- }\r
- }\r
- \r
- // Save off Old keys\r
- String delP1 = rdd.ns;\r
- String delP2 = rdd.name;\r
-\r
- // Write in new key\r
- rdd.ns = namespace.name;\r
- rdd.name = (delP2.length() > targetNameDot) ? delP2\r
- .substring(targetNameDot) : "";\r
- \r
- // Need to use non-cached, because switching namespaces, not\r
- // "create" per se\r
- if ((rq = q.roleDAO.create(trans, rdd)).isOK()) {\r
- // Put Role back into Perm, with correct info\r
- for(PermDAO.Data pdd : lpdd) {\r
- q.permDAO.addRole(trans, pdd, rdd);\r
- }\r
- // Change data for User Roles \r
- Result<List<UserRoleDAO.Data>> rurd = q.userRoleDAO.readByRole(trans, rdd.fullName());\r
- if(rurd.isOKhasData()) {\r
- for(UserRoleDAO.Data urd : rurd.value) {\r
- urd.ns = rdd.ns;\r
- urd.rname = rdd.name;\r
- q.userRoleDAO.update(trans, urd);\r
- }\r
- }\r
- // Now delete old one\r
- rdd.ns = delP1;\r
- rdd.name = delP2;\r
- if ((rq = q.roleDAO.delete(trans, rdd, false)).notOK()) {\r
- eb.log(rq);\r
- }\r
- } else {\r
- eb.log(rq);\r
- }\r
- }\r
- }\r
-\r
- // 4) Change any Permissions with children matching this NS, and\r
- Result<List<PermDAO.Data>> rpdc = q.permDAO.readChildren(trans,targetNs, targetName);\r
- if (rpdc.isOKhasData()) {\r
- for (PermDAO.Data pdd : rpdc.value) {\r
- // Remove old Perm from Roles, save them off\r
- List<RoleDAO.Data> lrdd = new ArrayList<RoleDAO.Data>();\r
- \r
- for(String rl : pdd.roles(false)) {\r
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);\r
- if(rrdd.isOKhasData()) {\r
- RoleDAO.Data rdd = rrdd.value;\r
- lrdd.add(rdd);\r
- q.roleDAO.delPerm(trans, rdd, pdd);\r
- } else{\r
- trans.error().log(rrdd.errorString());\r
- }\r
- }\r
- \r
- // Save off Old keys\r
- String delP1 = pdd.ns;\r
- String delP2 = pdd.type;\r
- pdd.ns = namespace.name;\r
- pdd.type = (delP2.length() > targetNameDot) ? delP2\r
- .substring(targetNameDot) : "";\r
- if ((rq = q.permDAO.create(trans, pdd)).isOK()) {\r
- // Put Role back into Perm, with correct info\r
- for(RoleDAO.Data rdd : lrdd) {\r
- q.roleDAO.addPerm(trans, rdd, pdd);\r
- }\r
-\r
- pdd.ns = delP1;\r
- pdd.type = delP2;\r
- if ((rq = q.permDAO.delete(trans, pdd, false)).notOK()) {\r
- eb.log(rq);\r
- // } else {\r
- // Need to invalidate directly, because we're\r
- // switching places in NS, not normal cache behavior\r
- // q.permDAO.invalidate(trans,pdd);\r
- }\r
- } else {\r
- eb.log(rq);\r
- }\r
- }\r
- }\r
- if (eb.hasErr()) {\r
- return Result.err(Status.ERR_ActionNotCompleted,eb.sb.toString(), eb.vars());\r
- }\r
- }\r
- return Result.ok();\r
- }\r
-\r
- private void addNSAdminRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {\r
- // Admin Role/Perm\r
- RoleDAO.Data rd = new RoleDAO.Data();\r
- rd.ns = ns;\r
- rd.name = "admin";\r
- rd.description = "AAF Namespace Administrators";\r
-\r
- PermDAO.Data pd = new PermDAO.Data();\r
- pd.ns = ns;\r
- pd.type = "access";\r
- pd.instance = Question.ASTERIX;\r
- pd.action = Question.ASTERIX;\r
- pd.description = "AAF Namespace Write Access";\r
-\r
- rd.perms = new HashSet<String>();\r
- rd.perms.add(pd.encode());\r
- eb.log(q.roleDAO.create(trans, rd));\r
-\r
- pd.roles = new HashSet<String>();\r
- pd.roles.add(rd.encode());\r
- eb.log(q.permDAO.create(trans, pd));\r
- }\r
-\r
- private void addNSOwnerRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {\r
- RoleDAO.Data rd = new RoleDAO.Data();\r
- rd.ns = ns;\r
- rd.name = "owner";\r
- rd.description = "AAF Namespace Owners";\r
-\r
- PermDAO.Data pd = new PermDAO.Data();\r
- pd.ns = ns;\r
- pd.type = "access";\r
- pd.instance = Question.ASTERIX;\r
- pd.action = Question.READ;\r
- pd.description = "AAF Namespace Read Access";\r
-\r
- rd.perms = new HashSet<String>();\r
- rd.perms.add(pd.encode());\r
- eb.log(q.roleDAO.create(trans, rd));\r
-\r
- pd.roles = new HashSet<String>();\r
- pd.roles.add(rd.encode());\r
- eb.log(q.permDAO.create(trans, pd));\r
- }\r
-\r
- /**\r
- * deleteNS\r
- * \r
- * Delete Namespace\r
- * \r
- * @param trans\r
- * @param org\r
- * @param ns\r
- * @param force\r
- * @param user\r
- * @return\r
- * @throws DAOException\r
- * \r
- * \r
- * To delete an NS, you need to: 1) validate permission to\r
- * modify this NS 2) Find all Roles with this NS, and 2a) if\r
- * Force, delete them, else modify to Parent NS 3) Find all\r
- * Perms with this NS, and modify to Parent NS 3a) if Force,\r
- * delete them, else modify to Parent NS 4) Find all IDs\r
- * associated to this NS, and deny if exists. 5) Remove NS\r
- */\r
- public Result<Void> deleteNS(AuthzTrans trans, String ns) {\r
- boolean force = trans.forceRequested();\r
- boolean move = trans.moveRequested();\r
- // 1) Validate\r
- Result<List<NsDAO.Data>> nsl;\r
- if ((nsl = q.nsDAO.read(trans, ns)).notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_NsNotFound, "%s does not exist", ns);\r
- }\r
- NsDAO.Data nsd = nsl.value.get(0);\r
- NsType nt;\r
- if (move && !q.canMove(nt = NsType.fromType(nsd.type))) {\r
- return Result.err(Status.ERR_Denied, "Namespace Force=move not permitted for Type %s",nt.name());\r
- }\r
-\r
- Result<NsDAO.Data> dnr = q.mayUser(trans, trans.user(), nsd, Access.write);\r
- if (dnr.status != Status.OK) {\r
- return Result.err(dnr);\r
- }\r
-\r
- // 2) Find Parent\r
- String user = trans.user();\r
- int idx = ns.lastIndexOf('.');\r
- NsDAO.Data parent;\r
- if (idx < 0) {\r
- if (!q.isGranted(trans, user, Define.ROOT_NS,Question.NS, ".", "delete")) {\r
- return Result.err(Result.ERR_Security,\r
- "%s may not delete Root Namespaces", user);\r
- }\r
- parent = null;\r
- } else {\r
- Result<NsDAO.Data> rlparent = q.deriveNs(trans, ns.substring(0, idx));\r
- if (rlparent.notOKorIsEmpty()) {\r
- return Result.err(rlparent);\r
- }\r
- parent = rlparent.value;\r
- }\r
-\r
- // Build up with any errors\r
- // If sb != null below is an indication of error\r
- StringBuilder sb = null;\r
- ErrBuilder er = new ErrBuilder();\r
-\r
- // 2a) Deny if any IDs on Namespace\r
- Result<List<CredDAO.Data>> creds = q.credDAO.readNS(trans, ns);\r
- if (creds.isOKhasData()) {\r
- if (force || move) {\r
- for (CredDAO.Data cd : creds.value) {\r
- er.log(q.credDAO.delete(trans, cd, false));\r
- // Since we're deleting all the creds, we should delete all\r
- // the user Roles for that Cred\r
- Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO\r
- .readByUser(trans, cd.id);\r
- if (rlurd.isOK()) {\r
- for (UserRoleDAO.Data data : rlurd.value) {\r
- q.userRoleDAO.delete(trans, data, false);\r
- }\r
- }\r
-\r
- }\r
- } else {\r
- // first possible StringBuilder Create.\r
- sb = new StringBuilder();\r
- sb.append('[');\r
- sb.append(ns);\r
- sb.append("] contains users");\r
- }\r
- }\r
-\r
- // 2b) Find (or delete if forced flag is set) dependencies\r
- // First, find if NS Perms are the only ones\r
- Result<List<PermDAO.Data>> rpdc = q.permDAO.readNS(trans, ns);\r
- if (rpdc.isOKhasData()) {\r
- // Since there are now NS perms, we have to count NON-NS perms.\r
- // FYI, if we delete them now, and the NS is not deleted, it is in\r
- // an inconsistent state.\r
- boolean nonaccess = false;\r
- for (PermDAO.Data pdd : rpdc.value) {\r
- if (!"access".equals(pdd.type)) {\r
- nonaccess = true;\r
- break;\r
- }\r
- }\r
- if (nonaccess && !force && !move) {\r
- if (sb == null) {\r
- sb = new StringBuilder();\r
- sb.append('[');\r
- sb.append(ns);\r
- sb.append("] contains ");\r
- } else {\r
- sb.append(", ");\r
- }\r
- sb.append("permissions");\r
- }\r
- }\r
-\r
- Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readNS(trans, ns);\r
- if (rrdc.isOKhasData()) {\r
- // Since there are now NS roles, we have to count NON-NS roles.\r
- // FYI, if we delete th)em now, and the NS is not deleted, it is in\r
- // an inconsistent state.\r
- int count = rrdc.value.size();\r
- for (RoleDAO.Data rdd : rrdc.value) {\r
- if ("admin".equals(rdd.name) || "owner".equals(rdd.name)) {\r
- --count;\r
- }\r
- }\r
- if (count > 0 && !force && !move) {\r
- if (sb == null) {\r
- sb = new StringBuilder();\r
- sb.append('[');\r
- sb.append(ns);\r
- sb.append("] contains ");\r
- } else {\r
- sb.append(", ");\r
- }\r
- sb.append("roles");\r
- }\r
- }\r
-\r
- // 2c) Deny if dependencies exist that would be moved to root level\r
- // parent is root level parent here. Need to find closest parent ns that\r
- // exists\r
- if (sb != null) {\r
- if (!force && !move) {\r
- sb.append(".\n Delete dependencies and try again. Note: using \"force=true\" will delete all. \"force=move\" will delete Creds, but move Roles and Perms to parent.");\r
- return Result.err(Status.ERR_DependencyExists, sb.toString());\r
- }\r
-\r
- if (move && (parent == null || parent.type == NsType.COMPANY.type)) {\r
- return Result\r
- .err(Status.ERR_DependencyExists,\r
- "Cannot move users, roles or permissions to [%s].\nDelete dependencies and try again",\r
- parent.name);\r
- }\r
- } else if (move && parent != null) {\r
- sb = new StringBuilder();\r
- // 3) Change any roles with children matching this NS, and\r
- moveRoles(trans, parent, sb, rrdc);\r
- // 4) Change any Perms with children matching this NS, and\r
- movePerms(trans, parent, sb, rpdc);\r
- }\r
-\r
- if (sb != null && sb.length() > 0) {\r
- return Result.err(Status.ERR_DependencyExists, sb.toString());\r
- }\r
-\r
- if (er.hasErr()) {\r
- if (trans.debug().isLoggable()) {\r
- trans.debug().log(er.toString());\r
- }\r
- return Result.err(Status.ERR_DependencyExists,\r
- "Namespace members cannot be deleted for %s", ns);\r
- }\r
-\r
- // 5) OK... good to go for NS Deletion...\r
- if (!rpdc.isEmpty()) {\r
- for (PermDAO.Data perm : rpdc.value) {\r
- deletePerm(trans, perm, true, true);\r
- }\r
- }\r
- if (!rrdc.isEmpty()) {\r
- for (RoleDAO.Data role : rrdc.value) {\r
- deleteRole(trans, role, true, true);\r
- }\r
- }\r
-\r
- return q.nsDAO.delete(trans, nsd, false);\r
- }\r
-\r
- public Result<List<String>> getOwners(AuthzTrans trans, String ns,\r
- boolean includeExpired) {\r
- return getUsersByRole(trans, ns + Question.DOT_OWNER, includeExpired);\r
- }\r
-\r
- private Result<Void> mayAddOwner(AuthzTrans trans, String ns, String id) {\r
- Result<NsDAO.Data> rq = q.deriveNs(trans, ns);\r
- if (rq.notOK()) {\r
- return Result.err(rq);\r
- }\r
-\r
- rq = q.mayUser(trans, trans.user(), rq.value, Access.write);\r
- if (rq.notOK()) {\r
- return Result.err(rq);\r
- }\r
-\r
- Identity user;\r
- Organization org = trans.org();\r
- try {\r
- if ((user = org.getIdentity(trans, id)) == null) {\r
- return Result.err(Status.ERR_Policy,\r
- "%s reports that this is not a valid credential",\r
- org.getName());\r
- }\r
- if (user.isResponsible()) {\r
- return Result.ok();\r
- } else {\r
- String reason="This is not a Test Environment";\r
- if (org.isTestEnv() && (reason = org.validate(trans, Policy.AS_EMPLOYEE, \r
- new CassExecutor(trans, this), id))==null) {\r
- return Result.ok();\r
- }\r
- return Result.err(Status.ERR_Policy,reason);\r
- }\r
- } catch (Exception e) {\r
- return Result.err(e);\r
- }\r
- }\r
-\r
- private Result<Void> mayAddAdmin(AuthzTrans trans, String ns, String id) {\r
- // Does NS Exist?\r
- Result<Void> r = checkValidID(trans, new Date(), id);\r
- if (r.notOK()) {\r
- return r;\r
- }\r
- // Is id able to be an Admin\r
- Result<NsDAO.Data> rq = q.deriveNs(trans, ns);\r
- if (rq.notOK()) {\r
- return Result.err(rq);\r
- }\r
- \r
- rq = q.mayUser(trans, trans.user(), rq.value, Access.write);\r
- if (rq.notOK()) {\r
- return Result.err(rq);\r
- }\r
- return r;\r
- }\r
-\r
- private Result<Void> checkValidID(AuthzTrans trans, Date now, String user) {\r
- Organization org = trans.org();\r
- if (user.endsWith(org.getRealm())) {\r
- try {\r
- if (org.getIdentity(trans, user) == null) {\r
- return Result.err(Status.ERR_Denied,\r
- "%s reports that %s is a faulty ID", org.getName(),\r
- user);\r
- }\r
- return Result.ok();\r
- } catch (Exception e) {\r
- return Result.err(Result.ERR_Security,\r
- "%s is not a valid %s Credential", user, org.getName());\r
- }\r
- } else {\r
- Result<List<CredDAO.Data>> cdr = q.credDAO.readID(trans, user);\r
- if (cdr.notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_Security,\r
- "%s is not a valid AAF Credential", user);\r
- }\r
- \r
- for (CredDAO.Data cd : cdr.value) {\r
- if (cd.expires.after(now)) {\r
- return Result.ok();\r
- }\r
- }\r
- }\r
- return Result.err(Result.ERR_Security, "%s has expired", user);\r
- }\r
-\r
- public Result<Void> delOwner(AuthzTrans trans, String ns, String id) {\r
- Result<NsDAO.Data> rq = q.deriveNs(trans, ns);\r
- if (rq.notOK()) {\r
- return Result.err(rq);\r
- }\r
-\r
- rq = q.mayUser(trans, trans.user(), rq.value, Access.write);\r
- if (rq.notOK()) {\r
- return Result.err(rq);\r
- }\r
-\r
- return delUserRole(trans, id, ns,Question.OWNER);\r
- }\r
-\r
- public Result<List<String>> getAdmins(AuthzTrans trans, String ns, boolean includeExpired) {\r
- return getUsersByRole(trans, ns + Question.DOT_ADMIN, includeExpired);\r
- }\r
-\r
- public Result<Void> delAdmin(AuthzTrans trans, String ns, String id) {\r
- Result<NsDAO.Data> rq = q.deriveNs(trans, ns);\r
- if (rq.notOK()) {\r
- return Result.err(rq);\r
- }\r
-\r
- rq = q.mayUser(trans, trans.user(), rq.value, Access.write);\r
- if (rq.notOK()) {\r
- return Result.err(rq);\r
- }\r
-\r
- return delUserRole(trans, id, ns, Question.ADMIN);\r
- }\r
-\r
	/**
	 * Helper function that moves permissions from a namespace being deleted to
	 * its parent namespace
	 * 
	 * @param trans
	 * @param parent
	 *            - the parent Namespace the Perms are re-homed into
	 * @param sb
	 *            - accumulates error details; any appended text signals
	 *            failure to the caller (deleteNS)
	 * @param rpdc
	 *            - list of permissions in namespace being deleted
	 */
	private void movePerms(AuthzTrans trans, NsDAO.Data parent,
			StringBuilder sb, Result<List<PermDAO.Data>> rpdc) {

		Result<Void> rv;
		Result<PermDAO.Data> pd;

		if (rpdc.isOKhasData()) {
			for (PermDAO.Data pdd : rpdc.value) {
				String delP2 = pdd.type;
				// The auto-generated "access" perm belongs to the NS itself; skip it.
				if ("access".equals(delP2)) {
					continue;
				}
				// Remove old Perm from Roles, save them off
				List<RoleDAO.Data> lrdd = new ArrayList<RoleDAO.Data>();
				
				for(String rl : pdd.roles(false)) {
					Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);
					if(rrdd.isOKhasData()) {
						RoleDAO.Data rdd = rrdd.value;
						lrdd.add(rdd);
						q.roleDAO.delPerm(trans, rdd, pdd);
					} else{
						trans.error().log(rrdd.errorString());
					}
				}
				
				// Save off Old keys
				String delP1 = pdd.ns;
				// Re-split the full type against the parent NS to get the new ns/name pair.
				NsSplit nss = new NsSplit(parent, pdd.fullType());
				pdd.ns = nss.ns;
				pdd.type = nss.name;
				// Use direct Create/Delete, because switching namespaces
				if ((pd = q.permDAO.create(trans, pdd)).isOK()) {
					// Put Role back into Perm, with correct info
					for(RoleDAO.Data rdd : lrdd) {
						q.roleDAO.addPerm(trans, rdd, pdd);
					}

					// Restore the old keys so the delete targets the original record.
					pdd.ns = delP1;
					pdd.type = delP2;
					if ((rv = q.permDAO.delete(trans, pdd, false)).notOK()) {
						sb.append(rv.details);
						sb.append('\n');
						// } else {
						// Need to invalidate directly, because we're switching
						// places in NS, not normal cache behavior
						// q.permDAO.invalidate(trans,pdd);
					}
				} else {
					sb.append(pd.details);
					sb.append('\n');
				}
			}
		}
	}
-\r
	/**
	 * Helper function that moves roles from a namespace being deleted to its
	 * parent namespace
	 * 
	 * @param trans
	 * @param parent
	 *            - the parent Namespace the Roles are re-homed into
	 * @param sb
	 *            - accumulates error details; any appended text signals
	 *            failure to the caller (deleteNS)
	 * @param rrdc
	 *            - list of roles in namespace being deleted
	 */
	private void moveRoles(AuthzTrans trans, NsDAO.Data parent,
			StringBuilder sb, Result<List<RoleDAO.Data>> rrdc) {

		Result<Void> rv;
		Result<RoleDAO.Data> rd;

		if (rrdc.isOKhasData()) {
			for (RoleDAO.Data rdd : rrdc.value) {
				String delP2 = rdd.name;
				// The generated admin/owner roles belong to the NS itself; skip them.
				if ("admin".equals(delP2) || "owner".equals(delP2)) {
					continue;
				}
				// Remove old Role from Perms, save them off
				List<PermDAO.Data> lpdd = new ArrayList<PermDAO.Data>();
				for(String p : rdd.perms(false)) {
					Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);
					if(rpdd.isOKhasData()) {
						PermDAO.Data pdd = rpdd.value;
						lpdd.add(pdd);
						q.permDAO.delRole(trans, pdd, rdd);
					} else{
						trans.error().log(rpdd.errorString());
					}
				}
				
				// Save off Old keys
				String delP1 = rdd.ns;

				// Re-split the full name against the parent NS for the new ns/name pair.
				NsSplit nss = new NsSplit(parent, rdd.fullName());
				rdd.ns = nss.ns;
				rdd.name = nss.name;
				// Use direct Create/Delete, because switching namespaces
				if ((rd = q.roleDAO.create(trans, rdd)).isOK()) {
					// Put Role back into Perm, with correct info
					for(PermDAO.Data pdd : lpdd) {
						q.permDAO.addRole(trans, pdd, rdd);
					}

					// Restore the old keys so the delete targets the original record.
					rdd.ns = delP1;
					rdd.name = delP2;
					// NOTE(review): third argument is true here but false in the
					// equivalent movePerms delete — confirm asymmetry is intentional.
					if ((rv = q.roleDAO.delete(trans, rdd, true)).notOK()) {
						sb.append(rv.details);
						sb.append('\n');
						// } else {
						// Need to invalidate directly, because we're switching
						// places in NS, not normal cache behavior
						// q.roleDAO.invalidate(trans,rdd);
					}
				} else {
					sb.append(rd.details);
					sb.append('\n');
				}
			}
		}
	}
-\r
- /**\r
- * Create Permission (and any missing Permission between this and Parent) if\r
- * we have permission\r
- * \r
- * Pass in the desired Management Permission for this Permission\r
- * \r
- * If Force is set, then Roles listed will be created, if allowed,\r
- * pre-granted.\r
- */\r
- public Result<Void> createPerm(AuthzTrans trans, PermDAO.Data perm, boolean fromApproval) {\r
- String user = trans.user();\r
- // Next, see if User is allowed to Manage Parent Permission\r
-\r
- Result<NsDAO.Data> rnsd;\r
- if (!fromApproval) {\r
- rnsd = q.mayUser(trans, user, perm, Access.write);\r
- if (rnsd.notOK()) {\r
- return Result.err(rnsd);\r
- }\r
- } else {\r
- rnsd = q.deriveNs(trans, perm.ns);\r
- }\r
-\r
- // Does Child exist?\r
- if (!trans.forceRequested()) {\r
- if (q.permDAO.read(trans, perm).isOKhasData()) {\r
- return Result.err(Status.ERR_ConflictAlreadyExists,\r
- "Permission [%s.%s|%s|%s] already exists.", perm.ns,\r
- perm.type, perm.instance, perm.action);\r
- }\r
- }\r
-\r
- // Attempt to add perms to roles, creating as possible\r
- Set<String> roles;\r
- String pstring = perm.encode();\r
-\r
- // For each Role\r
- for (String role : roles = perm.roles(true)) {\r
- Result<RoleDAO.Data> rdd = RoleDAO.Data.decode(trans,q,role);\r
- if(rdd.isOKhasData()) {\r
- RoleDAO.Data rd = rdd.value;\r
- if (!fromApproval) {\r
- // May User write to the Role in question.\r
- Result<NsDAO.Data> rns = q.mayUser(trans, user, rd,\r
- Access.write);\r
- if (rns.notOK()) {\r
- // Remove the role from Add, because\r
- roles.remove(role); // Don't allow adding\r
- trans.warn()\r
- .log("User [%s] does not have permission to relate Permissions to Role [%s]",\r
- user, role);\r
- }\r
- }\r
-\r
- Result<List<RoleDAO.Data>> rlrd;\r
- if ((rlrd = q.roleDAO.read(trans, rd)).notOKorIsEmpty()) {\r
- rd.perms(true).add(pstring);\r
- if (q.roleDAO.create(trans, rd).notOK()) {\r
- roles.remove(role); // Role doesn't exist, and can't be\r
- // created\r
- }\r
- } else {\r
- rd = rlrd.value.get(0);\r
- if (!rd.perms.contains(pstring)) {\r
- q.roleDAO.addPerm(trans, rd, perm);\r
- }\r
- }\r
- }\r
- }\r
-\r
- Result<PermDAO.Data> pdr = q.permDAO.create(trans, perm);\r
- if (pdr.isOK()) {\r
- return Result.ok();\r
- } else { \r
- return Result.err(pdr);\r
- }\r
- }\r
-\r
- public Result<Void> deletePerm(final AuthzTrans trans, final PermDAO.Data perm, boolean force, boolean fromApproval) {\r
- String user = trans.user();\r
-\r
- // Next, see if User is allowed to Manage Permission\r
- Result<NsDAO.Data> rnsd;\r
- if (!fromApproval) {\r
- rnsd = q.mayUser(trans, user, perm, Access.write);\r
- if (rnsd.notOK()) {\r
- return Result.err(rnsd);\r
- }\r
- }\r
- // Does Perm exist?\r
- Result<List<PermDAO.Data>> pdr = q.permDAO.read(trans, perm);\r
- if (pdr.notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_PermissionNotFound,"Permission [%s.%s|%s|%s] does not exist.",\r
- perm.ns,perm.type, perm.instance, perm.action);\r
- }\r
- // Get perm, but with rest of data.\r
- PermDAO.Data fullperm = pdr.value.get(0);\r
-\r
- // Attached to any Roles?\r
- if (fullperm.roles != null) {\r
- if (force) {\r
- for (String role : fullperm.roles) {\r
- Result<Void> rv = null;\r
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, q, role);\r
- if(rrdd.isOKhasData()) {\r
- trans.debug().log("Removing", role, "from", fullperm, "on Perm Delete");\r
- if ((rv = q.roleDAO.delPerm(trans, rrdd.value, fullperm)).notOK()) {\r
- if (rv.notOK()) {\r
- trans.error().log("Error removing Role during delFromPermRole: ",\r
- trans.getUserPrincipal(),\r
- rv.errorString());\r
- }\r
- }\r
- } else {\r
- return Result.err(rrdd);\r
- }\r
- }\r
- } else if (!fullperm.roles.isEmpty()) {\r
- return Result\r
- .err(Status.ERR_DependencyExists,\r
- "Permission [%s.%s|%s|%s] cannot be deleted as it is attached to 1 or more roles.",\r
- fullperm.ns, fullperm.type, fullperm.instance, fullperm.action);\r
- }\r
- }\r
-\r
- return q.permDAO.delete(trans, fullperm, false);\r
- }\r
-\r
- public Result<Void> deleteRole(final AuthzTrans trans, final RoleDAO.Data role, boolean force, boolean fromApproval) {\r
- String user = trans.user();\r
-\r
- // Next, see if User is allowed to Manage Role\r
- Result<NsDAO.Data> rnsd;\r
- if (!fromApproval) {\r
- rnsd = q.mayUser(trans, user, role, Access.write);\r
- if (rnsd.notOK()) {\r
- return Result.err(rnsd);\r
- }\r
- }\r
-\r
- // Are there any Users Attached to Role?\r
- Result<List<UserRoleDAO.Data>> urdr = q.userRoleDAO.readByRole(trans,role.fullName());\r
- if (force) {\r
- if (urdr.isOKhasData()) {\r
- for (UserRoleDAO.Data urd : urdr.value) {\r
- q.userRoleDAO.delete(trans, urd, false);\r
- }\r
- }\r
- } else if (urdr.isOKhasData()) {\r
- return Result.err(Status.ERR_DependencyExists,\r
- "Role [%s.%s] cannot be deleted as it is used by 1 or more Users.",\r
- role.ns, role.name);\r
- }\r
-\r
- // Does Role exist?\r
- Result<List<RoleDAO.Data>> rdr = q.roleDAO.read(trans, role);\r
- if (rdr.notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_RoleNotFound,\r
- "Role [%s.%s] does not exist", role.ns, role.name);\r
- }\r
- RoleDAO.Data fullrole = rdr.value.get(0); // full key search\r
-\r
- // Remove Self from Permissions... always, force or not. Force only applies to Dependencies (Users)\r
- if (fullrole.perms != null) {\r
- for (String perm : fullrole.perms(false)) {\r
- Result<PermDAO.Data> rpd = PermDAO.Data.decode(trans,q,perm);\r
- if (rpd.isOK()) {\r
- trans.debug().log("Removing", perm, "from", fullrole,"on Role Delete");\r
-\r
- Result<?> r = q.permDAO.delRole(trans, rpd.value, fullrole);\r
- if (r.notOK()) {\r
- trans.error().log("ERR_FDR1 unable to remove",fullrole,"from",perm,':',r.status,'-',r.details);\r
- }\r
- } else {\r
- trans.error().log("ERR_FDR2 Could not remove",perm,"from",fullrole);\r
- }\r
- }\r
- }\r
- return q.roleDAO.delete(trans, fullrole, false);\r
- }\r
-\r
	/**
	 * Only owner of Permission may add to Role
	 * 
	 * If force set, however, Role will be created before Grant, if User is
	 * allowed to create.
	 * 
	 * @param trans
	 * @param role          target Role (ns and name must be set)
	 * @param pd            Permission to grant to the Role
	 * @param fromApproval  when true, skip caller-permission checks
	 * @return Result.ok() on success, or an error Result
	 */
	public Result<Void> addPermToRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {
		String user = trans.user();
		
		if (!fromApproval) {
			// Resolve the COMPANY-level ancestor NS of both the Role and the Perm.
			Result<NsDAO.Data> rRoleCo = q.deriveFirstNsForType(trans, role.ns, NsType.COMPANY);
			if(rRoleCo.notOK()) {
				return Result.err(rRoleCo);
			}
			Result<NsDAO.Data> rPermCo = q.deriveFirstNsForType(trans, pd.ns, NsType.COMPANY);
			if(rPermCo.notOK()) {
				return Result.err(rPermCo);
			}

			// Not from same company
			if(!rRoleCo.value.name.equals(rPermCo.value.name)) {
				Result<Data> r;
				// Only grant if User ALSO has Write ability in Other Company
				if((r = q.mayUser(trans, user, role, Access.write)).notOK()) {
					return Result.err(r);
				}
			}
			

			// Must be Perm Admin, or Granted Special Permission
			Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);
			if (ucp.notOK()) {
				// Don't allow CLI potential Grantees to change their own AAF
				// Perms,
				if ((Define.ROOT_NS.equals(pd.ns) && Question.NS.equals(pd.type)) 
						|| !q.isGranted(trans, trans.user(),Define.ROOT_NS,Question.PERM, rPermCo.value.name, "grant")) {
					// Not otherwise granted
					// TODO Needed?
					return Result.err(ucp);
				}
				// Final Check... Don't allow Grantees to add to Roles they are
				// part of
				Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO
						.readByUser(trans, trans.user());
				if (rlurd.isOK()) {
					for (UserRoleDAO.Data ur : rlurd.value) {
						if (role.ns.equals(ur.ns) && role.name.equals(ur.rname)) {
							return Result.err(ucp);
						}
					}
				}
			}
		}

		// The Permission itself must already exist.
		Result<List<PermDAO.Data>> rlpd = q.permDAO.read(trans, pd);
		if (rlpd.notOKorIsEmpty()) {
			return Result.err(Status.ERR_PermissionNotFound,
					"Permission must exist to add to Role");
		}

		Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(trans, role); // Already
		// Checked
		// for
		// can
		// change
		// Role
		Result<Void> rv;

		if (rlrd.notOKorIsEmpty()) {
			// Role missing: with force, create it pre-granted (if user may write).
			if (trans.forceRequested()) {
				Result<NsDAO.Data> ucr = q.mayUser(trans, user, role,
						Access.write);
				if (ucr.notOK()) {
					return Result
							.err(Status.ERR_Denied,
									"Role [%s.%s] does not exist. User [%s] cannot create.",
									role.ns, role.name, user);
				}

				role.perms(true).add(pd.encode());
				Result<RoleDAO.Data> rdd = q.roleDAO.create(trans, role);
				if (rdd.isOK()) {
					rv = Result.ok();
				} else {
					rv = Result.err(rdd);
				}
			} else {
				return Result.err(Status.ERR_RoleNotFound,
						"Role [%s.%s] does not exist.", role.ns, role.name);
			}
		} else {
			role = rlrd.value.get(0);
			if (role.perms(false).contains(pd.encode())) {
				return Result.err(Status.ERR_ConflictAlreadyExists,
						"Permission [%s.%s] is already a member of role [%s,%s]",
						pd.ns, pd.type, role.ns, role.name);
			}
			role.perms(true).add(pd.encode()); // this is added for Caching
			// access purposes... doesn't
			// affect addPerm
			rv = q.roleDAO.addPerm(trans, role, pd);
		}
		if (rv.status == Status.OK) {
			// Mirror the link on the Perm side as well.
			return q.permDAO.addRole(trans, pd, role);
			// exploring how to add information message to successful http
			// request
		}
		return rv;
	}
-\r
	/**
	 * Either Owner of Role or Permission may delete from Role
	 * 
	 * @param trans
	 * @param role          Role to detach the Permission from
	 * @param pd            Permission to remove
	 * @param fromApproval  when true, skip caller-permission checks
	 * @return Result.ok() on success, or an error Result
	 */
	public Result<Void> delPermFromRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {
		String user = trans.user();
		if (!fromApproval) {
			// Write access on EITHER the Role's NS or the Perm's NS suffices.
			Result<NsDAO.Data> ucr = q.mayUser(trans, user, role, Access.write);
			Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);

			// If Can't change either Role or Perm, then deny
			if (ucr.notOK() && ucp.notOK()) {
				return Result.err(Status.ERR_Denied,
						"User [" + trans.user()
								+ "] does not have permission to delete ["
								+ pd.encode() + "] from Role ["
								+ role.fullName() + ']');
			}
		}

		Result<List<RoleDAO.Data>> rlr = q.roleDAO.read(trans, role);
		if (rlr.notOKorIsEmpty()) {
			// If Bad Data, clean out
			// (Role is gone: scrub any dangling references to it on the Perm side)
			Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);
			if (rlp.isOKhasData()) {
				for (PermDAO.Data pv : rlp.value) {
					q.permDAO.delRole(trans, pv, role);
				}
			}
			return Result.err(rlr);
		}
		String perm1 = pd.encode();
		boolean notFound;
		if (trans.forceRequested()) {
			// force skips the membership check below
			notFound = false;
		} else { // only check if force not set.
			notFound = true;
			for (RoleDAO.Data r : rlr.value) {
				if (r.perms != null) {
					for (String perm : r.perms) {
						if (perm1.equals(perm)) {
							notFound = false;
							break;
						}
					}
					if(!notFound) {
						break;
					}
				}
			}
		}
		if (notFound) { // Need to check both, in case of corruption
			return Result.err(Status.ERR_PermissionNotFound,
					"Permission [%s.%s|%s|%s] not associated with any Role",
					pd.ns,pd.type,pd.instance,pd.action);
		}

		// Read Perm for full data
		Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);
		Result<Void> rv = null;
		if (rlp.isOKhasData()) {
			for (PermDAO.Data pv : rlp.value) {
				// Unlink both directions: Perm->Role first, then Role->Perm.
				if ((rv = q.permDAO.delRole(trans, pv, role)).isOK()) {
					if ((rv = q.roleDAO.delPerm(trans, role, pv)).notOK()) {
						trans.error().log(
								"Error removing Perm during delFromPermRole:",
								trans.getUserPrincipal(), rv.errorString());
					}
				} else {
					trans.error().log(
							"Error removing Role during delFromPermRole:",
							trans.getUserPrincipal(), rv.errorString());
				}
			}
		} else {
			// Perm record unreadable: still attempt to unlink from the Role side.
			rv = q.roleDAO.delPerm(trans, role, pd);
			if (rv.notOK()) {
				trans.error().log("Error removing Role during delFromPermRole",
						rv.errorString());
			}
		}
		return rv == null ? Result.ok() : rv;
	}
-\r
- public Result<Void> delPermFromRole(AuthzTrans trans, String role,PermDAO.Data pd) {\r
- Result<NsSplit> nss = q.deriveNsSplit(trans, role);\r
- if (nss.notOK()) {\r
- return Result.err(nss);\r
- }\r
- RoleDAO.Data rd = new RoleDAO.Data();\r
- rd.ns = nss.value.ns;\r
- rd.name = nss.value.name;\r
- return delPermFromRole(trans, rd, pd, false);\r
- }\r
-\r
- /**\r
- * Add a User to Role\r
- * \r
- * 1) Role must exist 2) User must be a known Credential (i.e. mechID ok if\r
- * Credential) or known Organizational User\r
- * \r
- * @param trans\r
- * @param org\r
- * @param urData\r
- * @return\r
- * @throws DAOException\r
- */\r
- public Result<Void> addUserRole(AuthzTrans trans,UserRoleDAO.Data urData) {\r
- Result<Void> rv;\r
- if(Question.ADMIN.equals(urData.rname)) {\r
- rv = mayAddAdmin(trans, urData.ns, urData.user);\r
- } else if(Question.OWNER.equals(urData.rname)) {\r
- rv = mayAddOwner(trans, urData.ns, urData.user);\r
- } else {\r
- rv = checkValidID(trans, new Date(), urData.user);\r
- }\r
- if(rv.notOK()) {\r
- return rv; \r
- }\r
- \r
- // Check if record exists\r
- if (q.userRoleDAO.read(trans, urData).isOKhasData()) {\r
- return Result.err(Status.ERR_ConflictAlreadyExists,\r
- "User Role exists");\r
- }\r
- if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_RoleNotFound,\r
- "Role [%s.%s] does not exist", urData.ns, urData.rname);\r
- }\r
-\r
- urData.expires = trans.org().expiration(null, Expiration.UserInRole, urData.user).getTime();\r
- \r
- \r
- Result<UserRoleDAO.Data> udr = q.userRoleDAO.create(trans, urData);\r
- switch (udr.status) {\r
- case OK:\r
- return Result.ok();\r
- default:\r
- return Result.err(udr);\r
- }\r
- }\r
-\r
- public Result<Void> addUserRole(AuthzTrans trans, String user, String ns, String rname) {\r
- UserRoleDAO.Data urdd = new UserRoleDAO.Data();\r
- urdd.ns = ns;\r
- urdd.role(ns, rname);\r
- urdd.user = user;\r
- return addUserRole(trans,urdd);\r
- }\r
-\r
- /**\r
- * Extend User Role.\r
- * \r
- * extend the Expiration data, according to Organization rules.\r
- * \r
- * @param trans\r
- * @param org\r
- * @param urData\r
- * @return\r
- */\r
- public Result<Void> extendUserRole(AuthzTrans trans, UserRoleDAO.Data urData, boolean checkForExist) {\r
- // Check if record still exists\r
- if (checkForExist && q.userRoleDAO.read(trans, urData).notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_UserRoleNotFound,\r
- "User Role does not exist");\r
- }\r
- if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_RoleNotFound,\r
- "Role [%s.%s] does not exist", urData.ns,urData.rname);\r
- }\r
- // Special case for "Admin" roles. Issue brought forward with Prod\r
- // problem 9/26\r
-\r
- urData.expires = trans.org().expiration(null, Expiration.UserInRole).getTime(); // get\r
- // Full\r
- // time\r
- // starting\r
- // today\r
- return q.userRoleDAO.update(trans, urData);\r
- }\r
-\r
- // ////////////////////////////////////////////////////\r
- // Special User Role Functions\r
- // These exist, because User Roles have Expiration dates, which must be\r
- // accounted for\r
- // Also, as of July, 2015, Namespace Owners and Admins are now regular User\r
- // Roles\r
- // ////////////////////////////////////////////////////\r
- public Result<List<String>> getUsersByRole(AuthzTrans trans, String role, boolean includeExpired) {\r
- Result<List<UserRoleDAO.Data>> rurdd = q.userRoleDAO.readByRole(trans,role);\r
- if (rurdd.notOK()) {\r
- return Result.err(rurdd);\r
- }\r
- Date now = new Date();\r
- List<UserRoleDAO.Data> list = rurdd.value;\r
- List<String> rv = new ArrayList<String>(list.size()); // presize\r
- for (UserRoleDAO.Data urdd : rurdd.value) {\r
- if (includeExpired || urdd.expires.after(now)) {\r
- rv.add(urdd.user);\r
- }\r
- }\r
- return Result.ok(rv);\r
- }\r
-\r
- public Result<Void> delUserRole(AuthzTrans trans, String user, String ns, String rname) {\r
- UserRoleDAO.Data urdd = new UserRoleDAO.Data();\r
- urdd.user = user;\r
- urdd.role(ns,rname);\r
- Result<List<UserRoleDAO.Data>> r = q.userRoleDAO.read(trans, urdd);\r
- if (r.status == 404 || r.isEmpty()) {\r
- return Result.err(Status.ERR_UserRoleNotFound,\r
- "UserRole [%s] [%s.%s]", user, ns, rname);\r
- }\r
- if (r.notOK()) {\r
- return Result.err(r);\r
- }\r
-\r
- return q.userRoleDAO.delete(trans, urdd, false);\r
- }\r
-\r
- public Result<List<Identity>> createFuture(AuthzTrans trans, FutureDAO.Data data, String id, String user,\r
- NsDAO.Data nsd, String op) {\r
- // Create Future Object\r
- List<Identity> approvers=null;\r
- Result<FutureDAO.Data> fr = q.futureDAO.create(trans, data, id);\r
- if (fr.isOK()) {\r
- // User Future ID as ticket for Approvals\r
- final UUID ticket = fr.value.id;\r
- ApprovalDAO.Data ad;\r
- try {\r
- Organization org = trans.org();\r
- approvers = org.getApprovers(trans, user);\r
- for (Identity u : approvers) {\r
- ad = new ApprovalDAO.Data();\r
- // Note ad.id is set by ApprovalDAO Create\r
- ad.ticket = ticket;\r
- ad.user = user;\r
- ad.approver = u.id();\r
- ad.status = ApprovalDAO.PENDING;\r
- ad.memo = data.memo;\r
- ad.type = org.getApproverType();\r
- ad.operation = op;\r
- // Note ad.updated is created in System\r
- Result<ApprovalDAO.Data> ar = q.approvalDAO.create(trans,ad);\r
- if (ar.notOK()) {\r
- return Result.err(Status.ERR_ActionNotCompleted,\r
- "Approval for %s, %s could not be created: %s",\r
- ad.user, ad.approver, ar.details);\r
- }\r
- }\r
- if (nsd != null) {\r
- Result<List<UserRoleDAO.Data>> rrbr = q.userRoleDAO\r
- .readByRole(trans, nsd.name + Question.DOT_OWNER);\r
- if (rrbr.isOK()) {\r
- for (UserRoleDAO.Data urd : rrbr.value) {\r
- ad = new ApprovalDAO.Data();\r
- // Note ad.id is set by ApprovalDAO Create\r
- ad.ticket = ticket;\r
- ad.user = user;\r
- ad.approver = urd.user;\r
- ad.status = ApprovalDAO.PENDING;\r
- ad.memo = data.memo;\r
- ad.type = "owner";\r
- ad.operation = op;\r
- // Note ad.updated is created in System\r
- Result<ApprovalDAO.Data> ar = q.approvalDAO.create(trans, ad);\r
- if (ar.notOK()) {\r
- return Result.err(Status.ERR_ActionNotCompleted,\r
- "Approval for %s, %s could not be created: %s",\r
- ad.user, ad.approver,\r
- ar.details);\r
- }\r
- }\r
- }\r
- }\r
- } catch (Exception e) {\r
- return Result.err(e);\r
- }\r
- }\r
- \r
- return Result.ok(approvers);\r
- }\r
-\r
    /**
     * Execute or discard the deferred operation behind an Approval ticket.
     *
     * When cd.status is "approved" and this is the last outstanding Approval
     * (<= 1 on the ticket), the Future's serialized payload is reconstituted
     * and dispatched on (target, operation): Role/Perm/UserRole/NS/Delegate/Cred
     * with operations C(reate), D(elete), U(pdate), G(rant), UG(un-grant).
     * When "denied", all Approvals and the Future are deleted; a denied
     * UserRole extension ("U") additionally deletes the UserRole itself.
     *
     * @param trans transaction context
     * @param cd    the Approval being acted upon (carries ticket, status, operation)
     * @return Result of the last dispatched operation, or ok() if none ran
     */
    public Result<Void> performFutureOp(AuthzTrans trans, ApprovalDAO.Data cd) {
        Result<List<FutureDAO.Data>> fd = q.futureDAO.read(trans, cd.ticket);
        Result<List<ApprovalDAO.Data>> allApprovalsForTicket = q.approvalDAO
                .readByTicket(trans, cd.ticket);
        Result<Void> rv = Result.ok();
        // NOTE(review): fd.value and allApprovalsForTicket.value are dereferenced
        // without isOK() checks; a failed read would NPE here — confirm callers
        // guarantee a successful read before invoking this method.
        for (FutureDAO.Data curr : fd.value) {
            if ("approved".equalsIgnoreCase(cd.status)) {
                if (allApprovalsForTicket.value.size() <= 1) {
                    // should check if any other pendings before performing
                    // actions
                    try {
                        if (FOP_ROLE.equalsIgnoreCase(curr.target)) {
                            // Deferred Role create/delete
                            RoleDAO.Data data = new RoleDAO.Data();
                            data.reconstitute(curr.construct);
                            if ("C".equalsIgnoreCase(cd.operation)) {
                                Result<RoleDAO.Data> rd;
                                if ((rd = q.roleDAO.dao().create(trans, data)).notOK()) {
                                    rv = Result.err(rd);
                                }
                            } else if ("D".equalsIgnoreCase(cd.operation)) {
                                rv = deleteRole(trans, data, true, true);
                            }

                        } else if (FOP_PERM.equalsIgnoreCase(curr.target)) {
                            // Deferred Perm create/delete/grant/un-grant
                            PermDAO.Data pdd = new PermDAO.Data();
                            pdd.reconstitute(curr.construct);
                            if ("C".equalsIgnoreCase(cd.operation)) {
                                rv = createPerm(trans, pdd, true);
                            } else if ("D".equalsIgnoreCase(cd.operation)) {
                                rv = deletePerm(trans, pdd, true, true);
                            } else if ("G".equalsIgnoreCase(cd.operation)) {
                                // Grant: attach the Perm to each Role listed on it
                                Set<String> roles = pdd.roles(true);
                                Result<RoleDAO.Data> rrdd = null;
                                for (String roleStr : roles) {
                                    rrdd = RoleDAO.Data.decode(trans, q, roleStr);
                                    if (rrdd.isOKhasData()) {
                                        rv = addPermToRole(trans, rrdd.value, pdd, true);
                                    } else {
                                        trans.error().log(rrdd.errorString());
                                    }
                                }
                            } else if ("UG".equalsIgnoreCase(cd.operation)) {
                                // Un-Grant: detach the Perm from each listed Role
                                Set<String> roles = pdd.roles(true);
                                Result<RoleDAO.Data> rrdd;
                                for (String roleStr : roles) {
                                    rrdd = RoleDAO.Data.decode(trans, q, roleStr);
                                    if (rrdd.isOKhasData()) {
                                        rv = delPermFromRole(trans, rrdd.value, pdd, true);
                                    } else {
                                        trans.error().log(rrdd.errorString());
                                    }
                                }
                            }

                        } else if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {
                            UserRoleDAO.Data data = new UserRoleDAO.Data();
                            data.reconstitute(curr.construct);
                            // if I am the last to approve, create user role
                            if ("C".equalsIgnoreCase(cd.operation)) {
                                rv = addUserRole(trans, data);
                            } else if ("U".equals(cd.operation)) {
                                // NOTE(review): case-sensitive equals here, unlike
                                // the equalsIgnoreCase used everywhere else — confirm intentional
                                rv = extendUserRole(trans, data, true);
                            }

                        } else if (FOP_NS.equalsIgnoreCase(curr.target)) {
                            // Deferred Namespace creation
                            Namespace namespace = new Namespace();
                            namespace.reconstitute(curr.construct);

                            if ("C".equalsIgnoreCase(cd.operation)) {
                                rv = createNS(trans, namespace, true);
                            }

                        } else if (FOP_DELEGATE.equalsIgnoreCase(curr.target)) {
                            // Deferred Delegate create/update
                            DelegateDAO.Data data = new DelegateDAO.Data();
                            data.reconstitute(curr.construct);
                            if ("C".equalsIgnoreCase(cd.operation)) {
                                Result<DelegateDAO.Data> dd;
                                if ((dd = q.delegateDAO.create(trans, data)).notOK()) {
                                    rv = Result.err(dd);
                                }
                            } else if ("U".equalsIgnoreCase(cd.operation)) {
                                rv = q.delegateDAO.update(trans, data);
                            }
                        } else if (FOP_CRED.equalsIgnoreCase(curr.target)) {
                            // Deferred Credential creation
                            CredDAO.Data data = new CredDAO.Data();
                            data.reconstitute(curr.construct);
                            if ("C".equalsIgnoreCase(cd.operation)) {
                                Result<CredDAO.Data> rd;
                                if ((rd = q.credDAO.dao().create(trans, data)).notOK()) {
                                    rv = Result.err(rd);
                                }
                            }
                        }
                    } catch (IOException e) {
                        // reconstitute() failed; log and fall through (rv stays ok)
                        trans.error().log("IOException: ", e.getMessage(),
                                " \n occurred while performing", cd.memo,
                                " from approval ", cd.id.toString());
                    }
                }
            } else if ("denied".equalsIgnoreCase(cd.status)) {
                // Denial: remove every Approval on the ticket and the Future itself
                for (ApprovalDAO.Data ad : allApprovalsForTicket.value) {
                    q.approvalDAO.delete(trans, ad, false);
                }
                q.futureDAO.delete(trans, curr, false);
                if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {
                    // if I am the last to approve, create user role
                    if ("U".equals(cd.operation)) {
                        // A denied extension means the UserRole is removed outright
                        UserRoleDAO.Data data = new UserRoleDAO.Data();
                        try {
                            data.reconstitute(curr.construct);
                        } catch (IOException e) {
                            trans.error().log("Cannot reconstitue",curr.memo);
                        }
                        rv = delUserRole(trans, data.user, data.ns, data.rname);
                    }
                }

            }

            // if I am the last to approve, delete the future object
            if (rv.isOK() && allApprovalsForTicket.value.size() <= 1) {
                q.futureDAO.delete(trans, curr, false);
            }

        } // end for each
        return rv;

    }
-\r
    /** Factory for an Executor backed by this object's Cassandra DAO set. */
    public Executor newExecutor(AuthzTrans trans) {
        return new CassExecutor(trans, this);
    }
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.hl;\r
-\r
-import java.util.ArrayList;\r
-import java.util.Date;\r
-import java.util.HashMap;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Set;\r
-import java.util.TreeSet;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-\r
-/**\r
- * PermLookup is a Storage class for the various pieces of looking up Permission \r
- * during Transactions to avoid duplicate processing\r
- * \r
- *\r
- */\r
-// Package on purpose\r
-class PermLookup {\r
- private AuthzTrans trans;\r
- private String user;\r
- private Question q;\r
- private Result<List<UserRoleDAO.Data>> userRoles = null;\r
- private Result<List<RoleDAO.Data>> roles = null;\r
- private Result<Set<String>> permNames = null;\r
- private Result<List<PermDAO.Data>> perms = null;\r
- \r
- private PermLookup() {}\r
- \r
- static PermLookup get(AuthzTrans trans, Question q, String user) {\r
- PermLookup lp=null;\r
- Map<String, PermLookup> permMap = trans.get(Question.PERMS, null);\r
- if (permMap == null) {\r
- trans.put(Question.PERMS, permMap = new HashMap<String, PermLookup>());\r
- } else {\r
- lp = permMap.get(user);\r
- }\r
-\r
- if (lp == null) {\r
- lp = new PermLookup();\r
- lp.trans = trans;\r
- lp.user = user;\r
- lp.q = q;\r
- permMap.put(user, lp);\r
- }\r
- return lp;\r
- }\r
- \r
- public Result<List<UserRoleDAO.Data>> getUserRoles() {\r
- if(userRoles==null) {\r
- userRoles = q.userRoleDAO.readByUser(trans,user);\r
- if(userRoles.isOKhasData()) {\r
- List<UserRoleDAO.Data> lurdd = new ArrayList<UserRoleDAO.Data>();\r
- Date now = new Date();\r
- for(UserRoleDAO.Data urdd : userRoles.value) {\r
- if(urdd.expires.after(now)) { // Remove Expired\r
- lurdd.add(urdd);\r
- }\r
- }\r
- if(lurdd.size()==0) {\r
- return userRoles = Result.err(Status.ERR_UserNotFound,\r
- "%s not found or not associated with any Roles: ",\r
- user);\r
- } else {\r
- return userRoles = Result.ok(lurdd);\r
- }\r
- } else {\r
- return userRoles;\r
- }\r
- } else {\r
- return userRoles;\r
- }\r
- }\r
-\r
- public Result<List<RoleDAO.Data>> getRoles() {\r
- if(roles==null) {\r
- Result<List<UserRoleDAO.Data>> rur = getUserRoles();\r
- if(rur.isOK()) {\r
- List<RoleDAO.Data> lrdd = new ArrayList<RoleDAO.Data>();\r
- for (UserRoleDAO.Data urdata : rur.value) {\r
- // Gather all permissions from all Roles\r
- if(urdata.ns==null || urdata.rname==null) {\r
- trans.error().printf("DB Content Error: nulls in User Role %s %s", urdata.user,urdata.role);\r
- } else {\r
- Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(\r
- trans, urdata.ns, urdata.rname);\r
- if(rlrd.isOK()) {\r
- lrdd.addAll(rlrd.value);\r
- }\r
- }\r
- }\r
- return roles = Result.ok(lrdd);\r
- } else {\r
- return roles = Result.err(rur);\r
- }\r
- } else {\r
- return roles;\r
- }\r
- }\r
-\r
- public Result<Set<String>> getPermNames() {\r
- if(permNames==null) {\r
- Result<List<RoleDAO.Data>> rlrd = getRoles();\r
- if (rlrd.isOK()) {\r
- Set<String> pns = new TreeSet<String>();\r
- for (RoleDAO.Data rdata : rlrd.value) {\r
- pns.addAll(rdata.perms(false));\r
- }\r
- return permNames = Result.ok(pns);\r
- } else {\r
- return permNames = Result.err(rlrd);\r
- }\r
- } else {\r
- return permNames;\r
- }\r
- }\r
- \r
- public Result<List<PermDAO.Data>> getPerms(boolean lookup) {\r
- if(perms==null) {\r
- // Note: It should be ok for a Valid user to have no permissions -\r
- // 8/12/2013\r
- Result<Set<String>> rss = getPermNames();\r
- if(rss.isOK()) {\r
- List<PermDAO.Data> lpdd = new ArrayList<PermDAO.Data>();\r
- for (String perm : rss.value) {\r
- if(lookup) {\r
- Result<String[]> ap = PermDAO.Data.decodeToArray(trans, q, perm);\r
- if(ap.isOK()) {\r
- Result<List<PermDAO.Data>> rlpd = q.permDAO.read(perm,trans,ap);\r
- if (rlpd.isOKhasData()) {\r
- for (PermDAO.Data pData : rlpd.value) {\r
- lpdd.add(pData);\r
- }\r
- }\r
- } else {\r
- trans.error().log("In getPermsByUser, for", user, perm);\r
- }\r
- } else {\r
- Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, q, perm);\r
- if (pr.notOK()) {\r
- trans.error().log("In getPermsByUser, for", user, pr.errorString());\r
- } else {\r
- lpdd.add(pr.value);\r
- }\r
- }\r
-\r
- }\r
- return perms = Result.ok(lpdd);\r
- } else {\r
- return perms = Result.err(rss);\r
- }\r
- } else {\r
- return perms;\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.hl;\r
-\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.security.NoSuchAlgorithmException;\r
-import java.security.SecureRandom;\r
-import java.util.ArrayList;\r
-import java.util.Collections;\r
-import java.util.Comparator;\r
-import java.util.Date;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Set;\r
-import java.util.TreeSet;\r
-\r
-import org.onap.aaf.authz.common.Define;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.env.AuthzTransFilter;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.org.Organization;\r
-import org.onap.aaf.authz.org.Organization.Identity;\r
-import org.onap.aaf.dao.AbsCassDAO;\r
-import org.onap.aaf.dao.CachedDAO;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.onap.aaf.dao.aaf.cached.CachedCertDAO;\r
-import org.onap.aaf.dao.aaf.cached.CachedCredDAO;\r
-import org.onap.aaf.dao.aaf.cached.CachedNSDAO;\r
-import org.onap.aaf.dao.aaf.cached.CachedPermDAO;\r
-import org.onap.aaf.dao.aaf.cached.CachedRoleDAO;\r
-import org.onap.aaf.dao.aaf.cached.CachedUserRoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.ApprovalDAO;\r
-import org.onap.aaf.dao.aaf.cass.CacheInfoDAO;\r
-import org.onap.aaf.dao.aaf.cass.CertDAO;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.DelegateDAO;\r
-import org.onap.aaf.dao.aaf.cass.FutureDAO;\r
-import org.onap.aaf.dao.aaf.cass.HistoryDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsSplit;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO.Data;\r
-\r
-import org.onap.aaf.cadi.Hash;\r
-import org.onap.aaf.cadi.aaf.PermEval;\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.Slot;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-import org.onap.aaf.inno.env.util.Chrono;\r
-import com.datastax.driver.core.Cluster;\r
-\r
-/**\r
- * Question HL DAO\r
- * \r
- * A Data Access Combination Object which asks Security and other Questions\r
- * \r
- *\r
- */\r
-public class Question {\r
- // DON'T CHANGE FROM lower Case!!!\r
- public static enum Type {\r
- ns, role, perm, cred\r
- };\r
-\r
- public static final String OWNER="owner";\r
- public static final String ADMIN="admin";\r
- public static final String DOT_OWNER=".owner";\r
- public static final String DOT_ADMIN=".admin";\r
- static final String ASTERIX = "*";\r
-\r
- public static enum Access {\r
- read, write, create\r
- };\r
-\r
- public static final String READ = Access.read.name();\r
- public static final String WRITE = Access.write.name();\r
- public static final String CREATE = Access.create.name();\r
-\r
- public static final String ROLE = Type.role.name();\r
- public static final String PERM = Type.perm.name();\r
- public static final String NS = Type.ns.name();\r
- public static final String CRED = Type.cred.name();\r
- private static final String DELG = "delg";\r
- public static final String ATTRIB = "attrib";\r
-\r
-\r
- public static final int MAX_SCOPE = 10;\r
- public static final int APP_SCOPE = 3;\r
- public static final int COMPANY_SCOPE = 2;\r
- static Slot PERMS;\r
-\r
- private static Set<String> specialLog = null;\r
- public static final SecureRandom random = new SecureRandom();\r
- private static long traceID = random.nextLong();\r
- private static final String SPECIAL_LOG_SLOT = "SPECIAL_LOG_SLOT";\r
- private static Slot specialLogSlot = null;\r
- private static Slot transIDSlot = null;\r
-\r
-\r
- public final HistoryDAO historyDAO;\r
- public final CachedNSDAO nsDAO;\r
- public final CachedRoleDAO roleDAO;\r
- public final CachedPermDAO permDAO;\r
- public final CachedUserRoleDAO userRoleDAO;\r
- public final CachedCredDAO credDAO;\r
- public final CachedCertDAO certDAO;\r
- public final DelegateDAO delegateDAO;\r
- public final FutureDAO futureDAO;\r
- public final ApprovalDAO approvalDAO;\r
- private final CacheInfoDAO cacheInfoDAO;\r
-\r
- // final ContactDAO contDAO;\r
- // private static final String DOMAIN = "@aaf.att.com";\r
- // private static final int DOMAIN_LENGTH = 0;\r
-\r
    /**
     * Construct the full DAO set against the given Cassandra cluster/keyspace.
     *
     * Construction order matters: HistoryDAO first, then CacheInfoDAO (which
     * wraps it), then each Cached* DAO layered over its raw DAO.
     *
     * @param trans      transaction used for initialization logging and slots
     * @param cluster    Cassandra cluster handle
     * @param keyspace   keyspace the DAOs operate in
     * @param startClean when true, start aggressive cache cleansing for
     *                   user-related caches and the periodic cache refresh
     * @throws APIException
     * @throws IOException
     */
    public Question(AuthzTrans trans, Cluster cluster, String keyspace, boolean startClean) throws APIException, IOException {
        PERMS = trans.slot("USER_PERMS");
        trans.init().log("Instantiating DAOs");
        historyDAO = new HistoryDAO(trans, cluster, keyspace);

        // Deal with Cached Entries
        cacheInfoDAO = new CacheInfoDAO(trans, historyDAO);

        nsDAO = new CachedNSDAO(new NsDAO(trans, historyDAO, cacheInfoDAO),
                cacheInfoDAO);
        permDAO = new CachedPermDAO(
                new PermDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO);
        roleDAO = new CachedRoleDAO(
                new RoleDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO);
        userRoleDAO = new CachedUserRoleDAO(new UserRoleDAO(trans, historyDAO,
                cacheInfoDAO), cacheInfoDAO);
        credDAO = new CachedCredDAO(
                new CredDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO);
        certDAO = new CachedCertDAO(
                new CertDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO);

        futureDAO = new FutureDAO(trans, historyDAO);
        delegateDAO = new DelegateDAO(trans, historyDAO);
        approvalDAO = new ApprovalDAO(trans, historyDAO);

        // Only want to aggressively cleanse User related Caches... The others,
        // just normal refresh
        if(startClean) {
            CachedDAO.startCleansing(trans.env(), credDAO, userRoleDAO);
            CachedDAO.startRefresh(trans.env(), cacheInfoDAO);
        }
        // Set a Timer to Check Caches to send messages for Caching changes

        // Slots are static; initialize them once for the process
        if(specialLogSlot==null) {
            specialLogSlot = trans.slot(SPECIAL_LOG_SLOT);
            transIDSlot = trans.slot(AuthzTransFilter.TRANS_ID_SLOT);
        }

        AbsCassDAO.primePSIs(trans);
    }
-\r
-\r
    /** Close every DAO this Question constructed. */
    public void close(AuthzTrans trans) {
        historyDAO.close(trans);
        cacheInfoDAO.close(trans);
        nsDAO.close(trans);
        permDAO.close(trans);
        roleDAO.close(trans);
        userRoleDAO.close(trans);
        credDAO.close(trans);
        certDAO.close(trans);
        delegateDAO.close(trans);
        futureDAO.close(trans);
        approvalDAO.close(trans);
    }
-\r
- public Result<PermDAO.Data> permFrom(AuthzTrans trans, String type,\r
- String instance, String action) {\r
- Result<NsDAO.Data> rnd = deriveNs(trans, type);\r
- if (rnd.isOK()) {\r
- return Result.ok(new PermDAO.Data(new NsSplit(rnd.value, type),\r
- instance, action));\r
- } else {\r
- return Result.err(rnd);\r
- }\r
- }\r
-\r
    /**
     * getPermsByUser
     *
     * Because this call is frequently called internally, AND because we already
     * look for it in the initial Call, we cache within the Transaction
     *
     * @param trans
     * @param user
     * @param lookup when true, each permission is re-read from the DAO for full data
     * @return
     */
    public Result<List<PermDAO.Data>> getPermsByUser(AuthzTrans trans, String user, boolean lookup) {
        // Delegates to the per-user, per-transaction PermLookup cache
        return PermLookup.get(trans, this, user).getPerms(lookup);
    }
- \r
    /**
     * Return user's permissions, but when asking on behalf of another identity
     * (forUser != user), restrict the result to permissions in Namespaces that
     * forUser's own permissions also touch.
     *
     * @param trans
     * @param user    identity whose permissions are listed
     * @param forUser identity whose visible Namespaces act as the filter
     * @return filtered list of decoded PermDAO.Data; errors from either lookup propagate
     */
    public Result<List<PermDAO.Data>> getPermsByUserFromRolesFilter(AuthzTrans trans, String user, String forUser) {
        PermLookup plUser = PermLookup.get(trans, this, user);
        Result<Set<String>> plPermNames = plUser.getPermNames();
        if(plPermNames.notOK()) {
            return Result.err(plPermNames);
        }

        Set<String> nss;
        if(forUser.equals(user)) {
            // Asking about oneself: no namespace filtering (null = allow all)
            nss = null;
        } else {
            // Collect the Namespaces forUser's permissions belong to; these
            // form the visibility filter applied below.
            nss = new TreeSet<String>();
            PermLookup fUser = PermLookup.get(trans, this, forUser);
            Result<Set<String>> forUpn = fUser.getPermNames();
            if(forUpn.notOK()) {
                return Result.err(forUpn);
            }

            for(String pn : forUpn.value) {
                Result<String[]> decoded = PermDAO.Data.decodeToArray(trans, this, pn);
                if(decoded.isOKhasData()) {
                    // element [0] is the Namespace portion of the encoded perm
                    nss.add(decoded.value[0]);
                } else {
                    trans.error().log(pn,", derived from a Role, is invalid:",decoded.errorString());
                }
            }
        }

        List<PermDAO.Data> rlpUser = new ArrayList<PermDAO.Data>();
        Result<PermDAO.Data> rpdd;
        PermDAO.Data pdd;
        for(String pn : plPermNames.value) {
            rpdd = PermDAO.Data.decode(trans, this, pn);
            if(rpdd.isOKhasData()) {
                pdd=rpdd.value;
                // Keep the perm if unfiltered, or if its ns is visible to forUser
                if(nss==null || nss.contains(pdd.ns)) {
                    rlpUser.add(pdd);
                }
            } else {
                trans.error().log(pn,", derived from a Role, is invalid. Run Data Cleanup:",rpdd.errorString());
            }
        }
        return Result.ok(rlpUser);
    }
-\r
- public Result<List<PermDAO.Data>> getPermsByType(AuthzTrans trans, String perm) {\r
- Result<NsSplit> nss = deriveNsSplit(trans, perm);\r
- if (nss.notOK()) {\r
- return Result.err(nss);\r
- }\r
- return permDAO.readByType(trans, nss.value.ns, nss.value.name);\r
- }\r
-\r
- public Result<List<PermDAO.Data>> getPermsByName(AuthzTrans trans,\r
- String type, String instance, String action) {\r
- Result<NsSplit> nss = deriveNsSplit(trans, type);\r
- if (nss.notOK()) {\r
- return Result.err(nss);\r
- }\r
- return permDAO.read(trans, nss.value.ns, nss.value.name, instance,action);\r
- }\r
-\r
- public Result<List<PermDAO.Data>> getPermsByRole(AuthzTrans trans, String role, boolean lookup) {\r
- Result<NsSplit> nss = deriveNsSplit(trans, role);\r
- if (nss.notOK()) {\r
- return Result.err(nss);\r
- }\r
-\r
- Result<List<RoleDAO.Data>> rlrd = roleDAO.read(trans, nss.value.ns,\r
- nss.value.name);\r
- if (rlrd.notOKorIsEmpty()) {\r
- return Result.err(rlrd);\r
- }\r
- // Using Set to avoid duplicates\r
- Set<String> permNames = new HashSet<String>();\r
- if (rlrd.isOKhasData()) {\r
- for (RoleDAO.Data drr : rlrd.value) {\r
- permNames.addAll(drr.perms(false));\r
- }\r
- }\r
-\r
- // Note: It should be ok for a Valid user to have no permissions -\r
- // 8/12/2013\r
- List<PermDAO.Data> perms = new ArrayList<PermDAO.Data>();\r
- for (String perm : permNames) {\r
- Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, this, perm);\r
- if (pr.notOK()) {\r
- return Result.err(pr);\r
- }\r
-\r
- if(lookup) {\r
- Result<List<PermDAO.Data>> rlpd = permDAO.read(trans, pr.value);\r
- if (rlpd.isOKhasData()) {\r
- for (PermDAO.Data pData : rlpd.value) {\r
- perms.add(pData);\r
- }\r
- }\r
- } else {\r
- perms.add(pr.value);\r
- }\r
- }\r
-\r
- return Result.ok(perms);\r
- }\r
-\r
- public Result<List<RoleDAO.Data>> getRolesByName(AuthzTrans trans,\r
- String role) {\r
- Result<NsSplit> nss = deriveNsSplit(trans, role);\r
- if (nss.notOK()) {\r
- return Result.err(nss);\r
- }\r
- String r = nss.value.name;\r
- if (r.endsWith(".*")) { // do children Search\r
- return roleDAO.readChildren(trans, nss.value.ns,\r
- r.substring(0, r.length() - 2));\r
- } else if (ASTERIX.equals(r)) {\r
- return roleDAO.readChildren(trans, nss.value.ns, ASTERIX);\r
- } else {\r
- return roleDAO.read(trans, nss.value.ns, r);\r
- }\r
- }\r
-\r
- /**\r
- * Derive NS\r
- * \r
- * Given a Child Namespace, figure out what the best Namespace parent is.\r
- * \r
- * For instance, if in the NS table, the parent "com.att" exists, but not\r
- * "com.att.child" or "com.att.a.b.c", then passing in either\r
- * "com.att.child" or "com.att.a.b.c" will return "com.att"\r
- * \r
- * Uses recursive search on Cached DAO data\r
- * \r
- * @param trans\r
- * @param child\r
- * @return\r
- */\r
- public Result<NsDAO.Data> deriveNs(AuthzTrans trans, String child) {\r
- Result<List<NsDAO.Data>> r = nsDAO.read(trans, child);\r
- \r
- if (r.isOKhasData()) {\r
- return Result.ok(r.value.get(0));\r
- } else {\r
- int dot = child == null ? -1 : child.lastIndexOf('.');\r
- if (dot < 0) {\r
- return Result.err(Status.ERR_NsNotFound,\r
- "No Namespace for [%s]", child);\r
- } else {\r
- return deriveNs(trans, child.substring(0, dot));\r
- }\r
- }\r
- }\r
-\r
- public Result<NsDAO.Data> deriveFirstNsForType(AuthzTrans trans, String str, NsType type) {\r
- NsDAO.Data nsd;\r
-\r
- System.out.println("value of str before for loop ---------0---++++++++++++++++++" +str);\r
- for(int idx = str.indexOf('.');idx>=0;idx=str.indexOf('.',idx+1)) {\r
- // System.out.println("printing value of str-----------------1------------++++++++++++++++++++++" +str);\r
- Result<List<Data>> rld = nsDAO.read(trans, str.substring(0,idx));\r
- System.out.println("value of idx is -----------------++++++++++++++++++++++++++" +idx);\r
- System.out.println("printing value of str.substring-----------------1------------++++++++++++++++++++++" + (str.substring(0,idx)));\r
- System.out.println("value of ResultListData ------------------2------------+++++++++++++++++++++++++++" +rld);\r
- if(rld.isOKhasData()) {\r
- System.out.println("In if loop -----------------3-------------- ++++++++++++++++");\r
- System.out.println("value of nsd=rld.value.get(0).type -----------4------++++++++++++++++++++++++++++++++++++" +(nsd=rld.value.get(0)).type);\r
- System.out.println("value of rld.value.get(0).name.toString()+++++++++++++++++++++++++++++++ " +rld.value.get(0).name);\r
- if(type.type == (nsd=rld.value.get(0)).type) {\r
- return Result.ok(nsd);\r
- }\r
- } else {\r
- System.out.println("In else loop ----------------4------------+++++++++++++++++++++++");\r
- return Result.err(Status.ERR_NsNotFound,"There is no valid Company Namespace for %s",str.substring(0,idx));\r
- }\r
- }\r
- return Result.err(Status.ERR_NotFound, str + " does not contain type " + type.name());\r
- }\r
-\r
- public Result<NsSplit> deriveNsSplit(AuthzTrans trans, String child) {\r
- Result<NsDAO.Data> ndd = deriveNs(trans, child);\r
- if (ndd.isOK()) {\r
- NsSplit nss = new NsSplit(ndd.value, child);\r
- if (nss.isOK()) {\r
- return Result.ok(nss);\r
- } else {\r
- return Result.err(Status.ERR_NsNotFound,\r
- "Cannot split [%s] into valid namespace elements",\r
- child);\r
- }\r
- }\r
- return Result.err(ndd);\r
- }\r
-\r
- /**\r
- * Translate an ID into it's domain\r
- * \r
- * i.e. myid1234@myapp.att.com results in domain of com.att.myapp\r
- * \r
- * @param id\r
- * @return\r
- */\r
- public static String domain2ns(String id) {\r
- int at = id.indexOf('@');\r
- if (at >= 0) {\r
- String[] domain = id.substring(at + 1).split("\\.");\r
- StringBuilder ns = new StringBuilder(id.length());\r
- boolean first = true;\r
- for (int i = domain.length - 1; i >= 0; --i) {\r
- if (first) {\r
- first = false;\r
- } else {\r
- ns.append('.');\r
- }\r
- ns.append(domain[i]);\r
- }\r
- return ns.toString();\r
- } else {\r
- return "";\r
- }\r
-\r
- }\r
-\r
- /**\r
- * Validate Namespace of ID@Domain\r
- * \r
- * Namespace is reverse order of Domain.\r
- * \r
- * i.e. myid1234@myapp.att.com results in domain of com.att.myapp\r
- * \r
- * @param trans\r
- * @param id\r
- * @return\r
- */\r
- public Result<NsDAO.Data> validNSOfDomain(AuthzTrans trans, String id) {\r
- // Take domain, reverse order, and check on NS\r
- String ns;\r
- if(id.indexOf('@')<0) { // it's already an ns, not an ID\r
- ns = id;\r
- } else {\r
- ns = domain2ns(id);\r
- }\r
- if (ns.length() > 0) {\r
- if(!trans.org().getDomain().equals(ns)) { \r
- Result<List<NsDAO.Data>> rlnsd = nsDAO.read(trans, ns);\r
- if (rlnsd.isOKhasData()) {\r
- return Result.ok(rlnsd.value.get(0));\r
- }\r
- }\r
- }\r
- return Result.err(Status.ERR_NsNotFound,\r
- "A Namespace is not available for %s", id);\r
- }\r
-\r
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, Access access) {\r
- // <ns>.access|:role:<role name>|<read|write>\r
- String ns = ndd.name;\r
- int last;\r
- do {\r
- if (isGranted(trans, user, ns, "access", ":ns", access.name())) {\r
- return Result.ok(ndd);\r
- }\r
- if ((last = ns.lastIndexOf('.')) >= 0) {\r
- ns = ns.substring(0, last);\r
- }\r
- } while (last >= 0);\r
- // <root ns>.ns|:<client ns>:ns|<access>\r
- // AAF-724 - Make consistent response for May User", and not take the\r
- // last check... too confusing.\r
- Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":" + ndd.name + ":ns", access.name());\r
- if (rv.isOK()) {\r
- return rv;\r
- } else if(rv.status==Result.ERR_Backend) {\r
- return Result.err(rv);\r
- } else {\r
- return Result.err(Status.ERR_Denied, "[%s] may not %s in NS [%s]",\r
- user, access.name(), ndd.name);\r
- }\r
- }\r
-\r
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, RoleDAO.Data rdd, Access access) {\r
- Result<NsDAO.Data> rnsd = deriveNs(trans, rdd.ns);\r
- if (rnsd.isOK()) {\r
- return mayUser(trans, user, rnsd.value, rdd, access);\r
- }\r
- return rnsd;\r
- }\r
-\r
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, NsDAO.Data ndd, RoleDAO.Data rdd, Access access) {\r
- // 1) Is User in the Role?\r
- Result<List<UserRoleDAO.Data>> rurd = userRoleDAO.readUserInRole(trans, user, rdd.fullName());\r
- if (rurd.isOKhasData()) {\r
- return Result.ok(ndd);\r
- }\r
-\r
- String roleInst = ":role:" + rdd.name;\r
- // <ns>.access|:role:<role name>|<read|write>\r
- String ns = rdd.ns;\r
- int last;\r
- do {\r
- if (isGranted(trans, user, ns,"access", roleInst, access.name())) {\r
- return Result.ok(ndd);\r
- }\r
- if ((last = ns.lastIndexOf('.')) >= 0) {\r
- ns = ns.substring(0, last);\r
- }\r
- } while (last >= 0);\r
-\r
- // Check if Access by Global Role perm\r
- // <root ns>.ns|:<client ns>:role:name|<access>\r
- Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":"\r
- + rdd.ns + roleInst, access.name());\r
- if (rnsd.isOK()) {\r
- return rnsd;\r
- } else if(rnsd.status==Result.ERR_Backend) {\r
- return Result.err(rnsd);\r
- }\r
-\r
- // Check if Access to Whole NS\r
- // AAF-724 - Make consistent response for May User", and not take the\r
- // last check... too confusing.\r
- Result<org.onap.aaf.dao.aaf.cass.NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, \r
- ":" + rdd.ns + ":ns", access.name());\r
- if (rv.isOK()) {\r
- return rv;\r
- } else if(rnsd.status==Result.ERR_Backend) {\r
- return Result.err(rnsd);\r
- } else {\r
- return Result.err(Status.ERR_Denied, "[%s] may not %s Role [%s]",\r
- user, access.name(), rdd.fullName());\r
- }\r
-\r
- }\r
-\r
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,PermDAO.Data pdd, Access access) {\r
- Result<NsDAO.Data> rnsd = deriveNs(trans, pdd.ns);\r
- if (rnsd.isOK()) {\r
- return mayUser(trans, user, rnsd.value, pdd, access);\r
- }\r
- return rnsd;\r
- }\r
-\r
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, PermDAO.Data pdd, Access access) {\r
- if (isGranted(trans, user, pdd.ns, pdd.type, pdd.instance, pdd.action)) {\r
- return Result.ok(ndd);\r
- }\r
- String permInst = ":perm:" + pdd.type + ':' + pdd.instance + ':' + pdd.action;\r
- // <ns>.access|:role:<role name>|<read|write>\r
- String ns = ndd.name;\r
- int last;\r
- do {\r
- if (isGranted(trans, user, ns, "access", permInst, access.name())) {\r
- return Result.ok(ndd);\r
- }\r
- if ((last = ns.lastIndexOf('.')) >= 0) {\r
- ns = ns.substring(0, last);\r
- }\r
- } while (last >= 0);\r
-\r
- // Check if Access by NS perm\r
- // <root ns>.ns|:<client ns>:role:name|<access>\r
- Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + permInst, access.name());\r
- if (rnsd.isOK()) {\r
- return rnsd;\r
- } else if(rnsd.status==Result.ERR_Backend) {\r
- return Result.err(rnsd);\r
- }\r
-\r
- // Check if Access to Whole NS\r
- // AAF-724 - Make consistent response for May User", and not take the\r
- // last check... too confusing.\r
- Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + ":ns", access.name());\r
- if (rv.isOK()) {\r
- return rv;\r
- } else {\r
- return Result.err(Status.ERR_Denied,\r
- "[%s] may not %s Perm [%s|%s|%s]", user, access.name(),\r
- pdd.fullType(), pdd.instance, pdd.action);\r
- }\r
-\r
- }\r
-\r
- public Result<Void> mayUser(AuthzTrans trans, DelegateDAO.Data dd, Access access) {\r
- try {\r
- boolean isUser = trans.user().equals(dd.user);\r
- boolean isDelegate = dd.delegate != null\r
- && (dd.user.equals(dd.delegate) || trans.user().equals(\r
- dd.delegate));\r
- Organization org = trans.org();\r
- switch (access) {\r
- case create:\r
- if (org.getIdentity(trans, dd.user) == null) {\r
- return Result.err(Status.ERR_UserNotFound,\r
- "[%s] is not a user in the company database.",\r
- dd.user);\r
- }\r
- if (!dd.user.equals(dd.delegate) && org.getIdentity(trans, dd.delegate) == null) {\r
- return Result.err(Status.ERR_UserNotFound,\r
- "[%s] is not a user in the company database.",\r
- dd.delegate);\r
- }\r
- if (!trans.forceRequested() && dd.user != null && dd.user.equals(dd.delegate)) {\r
- return Result.err(Status.ERR_BadData,\r
- "[%s] cannot be a delegate for self", dd.user);\r
- }\r
- if (!isUser && !isGranted(trans, trans.user(), Define.ROOT_NS,DELG,\r
- org.getDomain(), Question.CREATE)) {\r
- return Result.err(Status.ERR_Denied,\r
- "[%s] may not create a delegate for [%s]",\r
- trans.user(), dd.user);\r
- }\r
- break;\r
- case read:\r
- case write:\r
- if (!isUser && !isDelegate && \r
- !isGranted(trans, trans.user(), Define.ROOT_NS,DELG,org.getDomain(), access.name())) {\r
- return Result.err(Status.ERR_Denied,\r
- "[%s] may not %s delegates for [%s]", trans.user(),\r
- access.name(), dd.user);\r
- }\r
- break;\r
- default:\r
- return Result.err(Status.ERR_BadData,"Unknown Access type [%s]", access.name());\r
- }\r
- } catch (Exception e) {\r
- return Result.err(e);\r
- }\r
- return Result.ok();\r
- }\r
-\r
- /*\r
- * Check (recursively, if necessary), if able to do something based on NS\r
- */\r
- private Result<NsDAO.Data> mayUserVirtueOfNS(AuthzTrans trans, String user, NsDAO.Data nsd, String ns_and_type, String access) {\r
- String ns = nsd.name;\r
-\r
- // If an ADMIN of the Namespace, then allow\r
- \r
- Result<List<UserRoleDAO.Data>> rurd;\r
- if ((rurd = userRoleDAO.readUserInRole(trans, user, nsd.name+ADMIN)).isOKhasData()) {\r
- return Result.ok(nsd);\r
- } else if(rurd.status==Result.ERR_Backend) {\r
- return Result.err(rurd);\r
- }\r
- \r
- // If Specially granted Global Permission\r
- if (isGranted(trans, user, Define.ROOT_NS,NS, ns_and_type, access)) {\r
- return Result.ok(nsd);\r
- }\r
-\r
- // Check recur\r
-\r
- int dot = ns.length();\r
- if ((dot = ns.lastIndexOf('.', dot - 1)) >= 0) {\r
- Result<NsDAO.Data> rnsd = deriveNs(trans, ns.substring(0, dot));\r
- if (rnsd.isOK()) {\r
- rnsd = mayUserVirtueOfNS(trans, user, rnsd.value, ns_and_type,access);\r
- } else if(rnsd.status==Result.ERR_Backend) {\r
- return Result.err(rnsd);\r
- }\r
- if (rnsd.isOK()) {\r
- return Result.ok(nsd);\r
- } else if(rnsd.status==Result.ERR_Backend) {\r
- return Result.err(rnsd);\r
- }\r
- }\r
- return Result.err(Status.ERR_Denied, "%s may not %s %s", user, access,\r
- ns_and_type);\r
- }\r
-\r
- \r
- /**\r
- * isGranted\r
- * \r
- * Important function - Check internal Permission Schemes for Permission to\r
- * do things\r
- * \r
- * @param trans\r
- * @param type\r
- * @param instance\r
- * @param action\r
- * @return\r
- */\r
- public boolean isGranted(AuthzTrans trans, String user, String ns, String type,String instance, String action) {\r
- Result<List<PermDAO.Data>> perms = getPermsByUser(trans, user, false);\r
- if (perms.isOK()) {\r
- for (PermDAO.Data pd : perms.value) {\r
- if (ns.equals(pd.ns)) {\r
- if (type.equals(pd.type)) {\r
- if (PermEval.evalInstance(pd.instance, instance)) {\r
- if(PermEval.evalAction(pd.action, action)) { // don't return action here, might miss other action \r
- return true;\r
- }\r
- }\r
- }\r
- }\r
- }\r
- }\r
- return false;\r
- }\r
-\r
- public Result<Date> doesUserCredMatch(AuthzTrans trans, String user, byte[] cred) throws DAOException {\r
- Result<List<CredDAO.Data>> result;\r
- TimeTaken tt = trans.start("Read DB Cred", Env.REMOTE);\r
- try {\r
- result = credDAO.readID(trans, user);\r
- } finally {\r
- tt.done();\r
- }\r
-\r
- Result<Date> rv = null;\r
- if(result.isOK()) {\r
- if (result.isEmpty()) {\r
- rv = Result.err(Status.ERR_UserNotFound, user);\r
- if (willSpecialLog(trans,user)) {\r
- trans.audit().log("Special DEBUG:", user, " does not exist in DB");\r
- }\r
- } else {\r
- Date now = new Date();//long now = System.currentTimeMillis();\r
- ByteBuffer md5=null;\r
- \r
- // Bug noticed 6/22. Sorting on the result can cause Concurrency Issues. \r
- List<CredDAO.Data> cddl;\r
- if(result.value.size() > 1) {\r
- cddl = new ArrayList<CredDAO.Data>(result.value.size());\r
- for(CredDAO.Data old : result.value) {\r
- if(old.type==CredDAO.BASIC_AUTH || old.type==CredDAO.BASIC_AUTH_SHA256) {\r
- cddl.add(old);\r
- }\r
- }\r
- if(cddl.size()>1) {\r
- Collections.sort(cddl,new Comparator<CredDAO.Data>() {\r
- @Override\r
- public int compare(org.onap.aaf.dao.aaf.cass.CredDAO.Data a,\r
- org.onap.aaf.dao.aaf.cass.CredDAO.Data b) {\r
- return b.expires.compareTo(a.expires);\r
- }\r
- });\r
- }\r
- } else {\r
- cddl = result.value;\r
- }\r
- \r
- for (CredDAO.Data cdd : cddl) {\r
- if (cdd.expires.after(now)) {\r
- try {\r
- switch(cdd.type) {\r
- case CredDAO.BASIC_AUTH:\r
- if(md5==null) {\r
- md5=ByteBuffer.wrap(Hash.encryptMD5(cred));\r
- }\r
- if(md5.compareTo(cdd.cred)==0) {\r
- return Result.ok(cdd.expires);\r
- } else if (willSpecialLog(trans,user)) {\r
- trans.audit().log("Special DEBUG:", user, "Client sent: ", trans.encryptor().encrypt(new String(cred)) ,cdd.expires);\r
- }\r
- break;\r
- case CredDAO.BASIC_AUTH_SHA256:\r
- ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.length);\r
- bb.putInt(cdd.other);\r
- bb.put(cred);\r
- byte[] hash = Hash.hashSHA256(bb.array());\r
- \r
- ByteBuffer sha256 = ByteBuffer.wrap(hash);\r
- if(sha256.compareTo(cdd.cred)==0) {\r
- return Result.ok(cdd.expires);\r
- } else if (willSpecialLog(trans,user)) {\r
- trans.audit().log("Special DEBUG:", user, "Client sent: ", trans.encryptor().encrypt(new String(cred)) ,cdd.expires);\r
- }\r
- break;\r
- default:\r
- trans.error().log("Unknown Credential Type %s for %s, %s",Integer.toString(cdd.type),cdd.id, Chrono.dateTime(cdd.expires));\r
- }\r
- } catch (NoSuchAlgorithmException e) {\r
- trans.error().log(e);\r
- }\r
- } else {\r
- rv = Result.err(Status.ERR_Security,\r
- "Credentials expired " + cdd.expires.toString());\r
- }\r
- } // end for each\r
- }\r
- } else {\r
- return Result.err(result);\r
- }\r
- return rv == null ? Result.create((Date) null, Status.ERR_Security,\r
- "Wrong credential") : rv;\r
- }\r
-\r
-\r
- public Result<CredDAO.Data> userCredSetup(AuthzTrans trans, CredDAO.Data cred) {\r
- if(cred.type==CredDAO.RAW) {\r
- TimeTaken tt = trans.start("Hash Cred", Env.SUB);\r
- try {\r
- cred.type = CredDAO.BASIC_AUTH_SHA256;\r
- cred.other = random.nextInt();\r
- ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.cred.capacity());\r
- bb.putInt(cred.other);\r
- bb.put(cred.cred);\r
- byte[] hash = Hash.hashSHA256(bb.array());\r
- cred.cred = ByteBuffer.wrap(hash);\r
- return Result.ok(cred);\r
- } catch (NoSuchAlgorithmException e) {\r
- return Result.err(Status.ERR_General,e.getLocalizedMessage());\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- }\r
- return Result.err(Status.ERR_Security,"invalid/unreadable credential");\r
- }\r
-\r
-\r
- public static final String APPROVED = "APPROVE";\r
- public static final String REJECT = "REJECT";\r
- public static final String PENDING = "PENDING";\r
-\r
- public Result<Void> canAddUser(AuthzTrans trans, UserRoleDAO.Data data,\r
- List<ApprovalDAO.Data> approvals) {\r
- // get the approval policy for the organization\r
-\r
- // get the list of approvals with an accept status\r
-\r
- // validate the approvals against the policy\r
-\r
- // for now check if all approvals are received and return\r
- // SUCCESS/FAILURE/SKIP\r
- boolean bReject = false;\r
- boolean bPending = false;\r
-\r
- for (ApprovalDAO.Data approval : approvals) {\r
- if (approval.status.equals(REJECT)) {\r
- bReject = true;\r
- } else if (approval.status.equals(PENDING)) {\r
- bPending = true;\r
- }\r
- }\r
- if (bReject) {\r
- return Result.err(Status.ERR_Policy,\r
- "Approval Polocy not conformed");\r
- }\r
- if (bPending) {\r
- return Result.err(Status.ERR_ActionNotCompleted,\r
- "Required Approvals not received");\r
- }\r
-\r
- return Result.ok();\r
- }\r
-\r
- private static final String NO_CACHE_NAME = "No Cache Data named %s";\r
-\r
- public Result<Void> clearCache(AuthzTrans trans, String cname) {\r
- boolean all = "all".equals(cname);\r
- Result<Void> rv = null;\r
-\r
- if (all || NsDAO.TABLE.equals(cname)) {\r
- int seg[] = series(NsDAO.CACHE_SEG);\r
- for(int i: seg) {cacheClear(trans, NsDAO.TABLE,i);}\r
- rv = cacheInfoDAO.touch(trans, NsDAO.TABLE, seg);\r
- }\r
- if (all || PermDAO.TABLE.equals(cname)) {\r
- int seg[] = series(NsDAO.CACHE_SEG);\r
- for(int i: seg) {cacheClear(trans, PermDAO.TABLE,i);}\r
- rv = cacheInfoDAO.touch(trans, PermDAO.TABLE,seg);\r
- }\r
- if (all || RoleDAO.TABLE.equals(cname)) {\r
- int seg[] = series(NsDAO.CACHE_SEG);\r
- for(int i: seg) {cacheClear(trans, RoleDAO.TABLE,i);}\r
- rv = cacheInfoDAO.touch(trans, RoleDAO.TABLE,seg);\r
- }\r
- if (all || UserRoleDAO.TABLE.equals(cname)) {\r
- int seg[] = series(NsDAO.CACHE_SEG);\r
- for(int i: seg) {cacheClear(trans, UserRoleDAO.TABLE,i);}\r
- rv = cacheInfoDAO.touch(trans, UserRoleDAO.TABLE,seg);\r
- }\r
- if (all || CredDAO.TABLE.equals(cname)) {\r
- int seg[] = series(NsDAO.CACHE_SEG);\r
- for(int i: seg) {cacheClear(trans, CredDAO.TABLE,i);}\r
- rv = cacheInfoDAO.touch(trans, CredDAO.TABLE,seg);\r
- }\r
- if (all || CertDAO.TABLE.equals(cname)) {\r
- int seg[] = series(NsDAO.CACHE_SEG);\r
- for(int i: seg) {cacheClear(trans, CertDAO.TABLE,i);}\r
- rv = cacheInfoDAO.touch(trans, CertDAO.TABLE,seg);\r
- }\r
-\r
- if (rv == null) {\r
- rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);\r
- }\r
- return rv;\r
- }\r
-\r
- public Result<Void> cacheClear(AuthzTrans trans, String cname,Integer segment) {\r
- Result<Void> rv;\r
- if (NsDAO.TABLE.equals(cname)) {\r
- rv = nsDAO.invalidate(segment);\r
- } else if (PermDAO.TABLE.equals(cname)) {\r
- rv = permDAO.invalidate(segment);\r
- } else if (RoleDAO.TABLE.equals(cname)) {\r
- rv = roleDAO.invalidate(segment);\r
- } else if (UserRoleDAO.TABLE.equals(cname)) {\r
- rv = userRoleDAO.invalidate(segment);\r
- } else if (CredDAO.TABLE.equals(cname)) {\r
- rv = credDAO.invalidate(segment);\r
- } else if (CertDAO.TABLE.equals(cname)) {\r
- rv = certDAO.invalidate(segment);\r
- } else {\r
- rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);\r
- }\r
- return rv;\r
- }\r
-\r
- private int[] series(int max) {\r
- int[] series = new int[max];\r
- for (int i = 0; i < max; ++i)\r
- series[i] = i;\r
- return series;\r
- }\r
-\r
- public boolean isDelegated(AuthzTrans trans, String user, String approver) {\r
- Result<List<DelegateDAO.Data>> userDelegatedFor = delegateDAO\r
- .readByDelegate(trans, user);\r
- for (DelegateDAO.Data curr : userDelegatedFor.value) {\r
- if (curr.user.equals(approver) && curr.delegate.equals(user)\r
- && curr.expires.after(new Date())) {\r
- return true;\r
- }\r
- }\r
- return false;\r
- }\r
-\r
- public static boolean willSpecialLog(AuthzTrans trans, String user) {\r
- Boolean b = trans.get(specialLogSlot, null);\r
- if(b==null) {\r
- if(specialLog==null) {\r
- return false;\r
- } else {\r
- b = specialLog.contains(user);\r
- trans.put(specialLogSlot, b);\r
- }\r
- }\r
- return b;\r
- }\r
- \r
- public static void logEncryptTrace(AuthzTrans trans, String data) {\r
- long ti;\r
- trans.put(transIDSlot, ti=nextTraceID());\r
- trans.trace().log("id="+Long.toHexString(ti)+",data=\""+trans.env().encryptor().encrypt(data)+'"');\r
- }\r
-\r
- private synchronized static long nextTraceID() {\r
- return ++traceID;\r
- }\r
-\r
- public static synchronized boolean specialLogOn(AuthzTrans trans, String id) {\r
- if (specialLog == null) {\r
- specialLog = new HashSet<String>();\r
- }\r
- boolean rc = specialLog.add(id);\r
- if(rc) {\r
- trans.trace().log("Trace on for",id); \r
- }\r
- return rc;\r
- }\r
-\r
- public static synchronized boolean specialLogOff(AuthzTrans trans, String id) {\r
- if(specialLog==null) {\r
- return false;\r
- }\r
- boolean rv = specialLog.remove(id);\r
- if (specialLog.isEmpty()) {\r
- specialLog = null;\r
- }\r
- if(rv) {\r
- trans.trace().log("Trace off for",id);\r
- }\r
- return rv;\r
- }\r
-\r
- /** \r
- * canMove\r
- * Which Types can be moved\r
- * @param nsType\r
- * @return\r
- */\r
- public boolean canMove(NsType nsType) {\r
- boolean rv;\r
- switch(nsType) {\r
- case DOT:\r
- case ROOT:\r
- case COMPANY:\r
- case UNKNOWN:\r
- rv = false;\r
- break;\r
- default:\r
- rv = true;\r
- }\r
- return rv;\r
- }\r
-\r
- public Result<String> isOwnerSponsor(AuthzTrans trans, String user, String ns, Identity mechID) {\r
- \r
- Identity caller;\r
- Organization org = trans.org();\r
- try {\r
- caller = org.getIdentity(trans, user);\r
- if(caller==null || !caller.isFound()) {\r
- return Result.err(Status.ERR_NotFound,"%s is not a registered %s entity",user,org.getName());\r
- }\r
- } catch (Exception e) {\r
- return Result.err(e);\r
- }\r
- String sponsor = mechID.responsibleTo();\r
- Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER);\r
- boolean isOwner = false;\r
- if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){\r
- if(urdd.expires.after(new Date())) {\r
- isOwner = true;\r
- }\r
- }};\r
- if(!isOwner) {\r
- return Result.err(Status.ERR_Policy,"%s is not a current owner of %s",user,ns);\r
- }\r
- \r
- if(!caller.id().equals(sponsor)) {\r
- return Result.err(Status.ERR_Denied,"%s is not the sponsor of %s",user,mechID.id());\r
- }\r
- return Result.ok(sponsor);\r
- }\r
- \r
- public boolean isAdmin(AuthzTrans trans, String user, String ns) {\r
- Date now = new Date();\r
- Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+ADMIN);\r
- if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){\r
- if(urdd.expires.after(now)) {\r
- return true;\r
- }\r
- }};\r
- return false;\r
- }\r
- \r
- public boolean isOwner(AuthzTrans trans, String user, String ns) {\r
- Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER);\r
- Date now = new Date();\r
- if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){\r
- if(urdd.expires.after(now)) {\r
- return true;\r
- }\r
- }};\r
- return false;\r
- }\r
-\r
- public int countOwner(AuthzTrans trans, String user, String ns) {\r
- Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER);\r
- Date now = new Date();\r
- int count = 0;\r
- if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){\r
- if(urdd.expires.after(now)) {\r
- ++count;\r
- }\r
- }};\r
- return count;\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.session;\r
-\r
-import java.io.IOException;\r
-\r
-import javax.servlet.Filter;\r
-import javax.servlet.FilterChain;\r
-import javax.servlet.FilterConfig;\r
-import javax.servlet.ServletException;\r
-import javax.servlet.ServletRequest;\r
-import javax.servlet.ServletResponse;\r
-\r
-import org.onap.aaf.cssa.rserv.TransFilter;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.EnvStore;\r
-import org.onap.aaf.inno.env.Slot;\r
-import org.onap.aaf.inno.env.TransStore;\r
-import org.onap.aaf.inno.env.util.Pool;\r
-import org.onap.aaf.inno.env.util.Pool.Creator;\r
-import org.onap.aaf.inno.env.util.Pool.Pooled;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.Session;\r
-\r
-public class SessionFilter<TRANS extends TransStore> implements Filter {\r
- public static final String SESSION_SLOT = "__SESSION__";\r
- private static Slot sessionSlot;\r
- private static Pool<Session> pool;\r
-\r
- public SessionFilter(EnvStore<?> env, Cluster cluster, String keyspace) {\r
- synchronized(env) {\r
- if(sessionSlot==null) {\r
- sessionSlot = env.slot(SESSION_SLOT);\r
- }\r
- if(pool==null) {\r
- pool = new Pool<Session>(new SessionCreator(env,cluster,keyspace));\r
- }\r
- }\r
- }\r
-\r
- @Override\r
- public void init(FilterConfig fc) throws ServletException {\r
- // Session does not need any sort of configuration from Filter\r
- }\r
-\r
- @Override\r
- public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws IOException, ServletException {\r
- @SuppressWarnings("unchecked")\r
- TRANS trans = (TRANS)req.getAttribute(TransFilter.TRANS_TAG);\r
- try {\r
- Pooled<Session> psess = pool.get();\r
- try {\r
- trans.put(sessionSlot, psess.content);\r
- chain.doFilter(req, resp);\r
- } finally {\r
- psess.done();\r
- }\r
- } catch (APIException e) {\r
- throw new ServletException(e);\r
- }\r
- }\r
-\r
- public Pooled<Session> load(TRANS trans) throws APIException {\r
- Pooled<Session> psess = pool.get();\r
- trans.put(sessionSlot, psess.content);\r
- return psess;\r
- }\r
- \r
- \r
- /**\r
- * Clear will drain the pool, so that new Sessions will be constructed.\r
- * \r
- * Suitable for Management calls. \r
- */\r
- public static void clear() {\r
- if(pool!=null) {\r
- pool.drain();\r
- } \r
- }\r
- \r
- @Override\r
- public void destroy() {\r
- pool.drain();\r
- }\r
-\r
- private class SessionCreator implements Creator<Session> {\r
- private Cluster cluster;\r
- private String keyspace;\r
- private Env env;\r
- \r
- public SessionCreator(Env env, Cluster cluster, String keyspace) {\r
- this.cluster = cluster;\r
- this.keyspace = keyspace;\r
- this.env = env;\r
- }\r
- \r
- @Override\r
- public Session create() throws APIException {\r
- env.info().log("Creating a Cassandra Session");\r
- return cluster.connect(keyspace);\r
- }\r
-\r
- @Override\r
- public void destroy(Session t) {\r
- env.info().log("Shutting down a Cassandra Session");\r
- t.close();\r
- }\r
-\r
- @Override\r
- public boolean isValid(Session t) {\r
- return true;\r
- }\r
-\r
- @Override\r
- public void reuse(Session t) {\r
- // Nothing is needed to reuse this Session\r
- }\r
- \r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.cass.hl;\r
-\r
-import static junit.framework.Assert.assertEquals;\r
-import static junit.framework.Assert.assertFalse;\r
-import static junit.framework.Assert.assertTrue;\r
-\r
-import java.security.Principal;\r
-import java.util.ArrayList;\r
-import java.util.Date;\r
-import java.util.List;\r
-\r
-import org.junit.AfterClass;\r
-import org.junit.BeforeClass;\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO.Data;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-import org.onap.aaf.dao.aaf.hl.Question.Access;\r
-import org.onap.aaf.dao.aaf.test.AbsJUCass;\r
-\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-\r
-public class JU_Question extends AbsJUCass {\r
-\r
- private static final int EXPIRES_IN = 60000000;\r
- private static final String COM_TEST_JU = "com.test.ju_question";\r
- private static final String JU9999_JU_TEST_COM = "ju9999@ju.test.com";\r
- private static final String JU9998_JU_TEST_COM = "ju9998@ju.test.com";\r
- private static final String READ = "read";\r
- private static final int NFR_1 = 80;\r
- private static final int NFR_2 = 4000;\r
- private static final int ROLE_LEVEL1 = 1000;\r
- private static final int PERM_LEVEL1 = 1000;\r
-// private static final int PERM_LEVEL2 = 20;\r
- private static Question q;\r
- private static NsDAO.Data ndd;\r
-\r
- @BeforeClass\r
- public static void startupBeforeClass() throws Exception {\r
- details=false;\r
- AuthzTrans trans = env.newTransNoAvg();\r
- q = new Question(trans,cluster,AUTHZ, false);\r
- ndd = new NsDAO.Data();\r
- ndd.name=COM_TEST_JU;\r
- ndd.type=3; // app\r
- ndd.parent="com.test";\r
- ndd.description="Temporary Namespace for JU_Question";\r
- q.nsDAO.create(trans, ndd);\r
- }\r
- \r
- @AfterClass\r
- public static void endAfterClass() throws Exception {\r
- q.nsDAO.delete(trans, ndd,false);\r
- }\r
-// @Test\r
- public void mayUserRead_EmptyPerm() {\r
- PermDAO.Data pdd = new PermDAO.Data();\r
- Result<NsDAO.Data> result = q.mayUser(trans,JU9999_JU_TEST_COM,pdd,Access.read);\r
- assertFalse(result.isOK());\r
- }\r
-\r
-// @Test\r
- public void mayUserRead_OnePermNotExist() {\r
- Result<NsDAO.Data> result = q.mayUser(trans,JU9999_JU_TEST_COM,newPerm(0,0,READ),Access.read);\r
- assertFalse(result.isOK());\r
- assertEquals("Denied - ["+ JU9999_JU_TEST_COM +"] may not read Perm [" + COM_TEST_JU + ".myPerm0|myInstance0|read]",result.errorString());\r
- }\r
- \r
-// @Test\r
- public void mayUserRead_OnePermExistDenied() {\r
- PermDAO.Data perm = newPerm(0,0,READ);\r
- q.permDAO.create(trans,perm);\r
- try {\r
- Result<NsDAO.Data> result;\r
- TimeTaken tt = trans.start("q.mayUser...", Env.SUB);\r
- try {\r
- result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read);\r
- } finally {\r
- tt.done();\r
- assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);\r
- }\r
- assertFalse(result.isOK());\r
- assertEquals("Denied - ["+ JU9999_JU_TEST_COM +"] may not read Perm ["+COM_TEST_JU + ".myPerm0|myInstance0|read]",result.errorString());\r
- } finally {\r
- q.permDAO.delete(trans, perm, false);\r
- }\r
- }\r
-\r
-// @Test\r
- public void mayUserRead_OnePermOneRoleExistOK() {\r
- PermDAO.Data perm = newPerm(0,0,READ);\r
- RoleDAO.Data role = newRole(0,perm);\r
- UserRoleDAO.Data ur = newUserRole(role,JU9999_JU_TEST_COM,EXPIRES_IN);\r
- try {\r
- q.permDAO.create(trans,perm);\r
- q.roleDAO.create(trans,role);\r
- q.userRoleDAO.create(trans,ur);\r
- \r
- Result<NsDAO.Data> result;\r
- TimeTaken tt = trans.start("q.mayUser...", Env.SUB);\r
- try {\r
- result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read);\r
- } finally {\r
- tt.done();\r
- assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);\r
- }\r
- assertTrue(result.isOK());\r
- } finally {\r
- q.permDAO.delete(trans, perm, false);\r
- q.roleDAO.delete(trans, role, false);\r
- q.userRoleDAO.delete(trans, ur, false);\r
- }\r
- }\r
-\r
-// @Test\r
- public void filter_OnePermOneRoleExistOK() {\r
- PermDAO.Data perm = newPerm(0,0,READ);\r
- RoleDAO.Data role = newRole(0,perm);\r
- UserRoleDAO.Data ur1 = newUserRole(role,JU9998_JU_TEST_COM,EXPIRES_IN);\r
- UserRoleDAO.Data ur2 = newUserRole(role,JU9999_JU_TEST_COM,EXPIRES_IN);\r
- try {\r
- q.permDAO.create(trans,perm);\r
- q.roleDAO.create(trans,role);\r
- q.userRoleDAO.create(trans,ur1);\r
- q.userRoleDAO.create(trans,ur2);\r
- \r
- Result<List<PermDAO.Data>> pres;\r
- TimeTaken tt = trans.start("q.getPerms...", Env.SUB);\r
- try {\r
- pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9999_JU_TEST_COM);\r
- } finally {\r
- tt.done();\r
- trans.info().log("filter_OnePermOneRleExistOK",tt);\r
- assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);\r
- }\r
- assertTrue(pres.isOK());\r
- \r
- try {\r
- pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM);\r
- } finally {\r
- tt.done();\r
- trans.info().log("filter_OnePermOneRleExistOK No Value",tt);\r
- assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);\r
- }\r
- assertFalse(pres.isOKhasData());\r
-\r
- } finally {\r
- q.permDAO.delete(trans, perm, false);\r
- q.roleDAO.delete(trans, role, false);\r
- q.userRoleDAO.delete(trans, ur1, false);\r
- q.userRoleDAO.delete(trans, ur2, false);\r
- }\r
- }\r
-\r
-// @Test\r
- public void mayUserRead_OnePermMultiRoleExistOK() {\r
- PermDAO.Data perm = newPerm(0,0,READ);\r
- List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();\r
- List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();\r
- try {\r
- q.permDAO.create(trans,perm);\r
- for(int i=0;i<ROLE_LEVEL1;++i) {\r
- RoleDAO.Data role = newRole(i,perm);\r
- lrole.add(role);\r
- q.roleDAO.create(trans,role);\r
- \r
- UserRoleDAO.Data ur = newUserRole(role,JU9999_JU_TEST_COM,60000000);\r
- lur.add(ur);\r
- q.userRoleDAO.create(trans,ur);\r
- }\r
- \r
- Result<NsDAO.Data> result;\r
- TimeTaken tt = trans.start("mayUserRead_OnePermMultiRoleExistOK", Env.SUB);\r
- try {\r
- result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read);\r
- } finally {\r
- tt.done();\r
- env.info().log(tt,ROLE_LEVEL1,"iterations");\r
- assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);\r
- }\r
- assertTrue(result.isOK());\r
- } finally {\r
- q.permDAO.delete(trans, perm, false);\r
- for(RoleDAO.Data role : lrole) {\r
- q.roleDAO.delete(trans, role, false);\r
- }\r
- for(UserRoleDAO.Data ur : lur) {\r
- q.userRoleDAO.delete(trans, ur, false);\r
- }\r
- }\r
- }\r
-\r
- @Test\r
- public void mayUserRead_MultiPermOneRoleExistOK() {\r
- RoleDAO.Data role = newRole(0);\r
- UserRoleDAO.Data ur = newUserRole(role,JU9999_JU_TEST_COM,EXPIRES_IN);\r
- List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();\r
- try {\r
- for(int i=0;i<PERM_LEVEL1;++i) {\r
- lperm.add(newPerm(i,i,READ,role));\r
- }\r
- q.roleDAO.create(trans, role);\r
- q.userRoleDAO.create(trans, ur);\r
- \r
- Result<NsDAO.Data> result;\r
- TimeTaken tt = trans.start("mayUserRead_MultiPermOneRoleExistOK", Env.SUB);\r
- try {\r
- result = q.mayUser(trans,JU9999_JU_TEST_COM,lperm.get(PERM_LEVEL1-1),Access.read);\r
- } finally {\r
- tt.done();\r
- env.info().log(tt,PERM_LEVEL1,"iterations");\r
- assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);\r
- }\r
- assertTrue(result.isOK());\r
- } finally {\r
- for(PermDAO.Data perm : lperm) {\r
- q.permDAO.delete(trans, perm, false);\r
- }\r
- q.roleDAO.delete(trans, role, false);\r
- q.userRoleDAO.delete(trans, ur, false);\r
- }\r
- }\r
-\r
-//// @Test\r
-// public void mayUserRead_MultiPermMultiRoleExistOK() {\r
-// List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();\r
-// List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();\r
-// List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();\r
-//\r
-// try {\r
-// RoleDAO.Data role;\r
-// UserRoleDAO.Data ur;\r
-// for(int i=0;i<ROLE_LEVEL1;++i) {\r
-// lrole.add(role=newRole(i));\r
-// q.roleDAO.create(trans, role);\r
-// lur.add(ur=newUserRole(role, JU9999_JU_TEST_COM, EXPIRES_IN));\r
-// q.userRoleDAO.create(trans, ur);\r
-// for(int j=0;j<PERM_LEVEL2;++j) {\r
-// lperm.add(newPerm(i,j,READ,role));\r
-// }\r
-// }\r
-// \r
-// Result<NsDAO.Data> result;\r
-// TimeTaken tt = trans.start("mayUserRead_MultiPermMultiRoleExistOK", Env.SUB);\r
-// try {\r
-// result = q.mayUser(trans,JU9999_JU_TEST_COM,lperm.get(ROLE_LEVEL1*PERM_LEVEL2-1),Access.read);\r
-// } finally {\r
-// tt.done();\r
-// env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role");\r
-// assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);\r
-// }\r
-// assertTrue(result.isOK());\r
-// } finally {\r
-// for(PermDAO.Data perm : lperm) {\r
-// q.permDAO.delete(trans, perm, false);\r
-// }\r
-// for(RoleDAO.Data role : lrole) {\r
-// q.roleDAO.delete(trans, role, false);\r
-// }\r
-// for(UserRoleDAO.Data ur : lur) {\r
-// q.userRoleDAO.delete(trans, ur, false);\r
-// }\r
-// }\r
-// }\r
-\r
- @Test\r
- public void mayUserRead_MultiPermMultiRoleExist_10x10() {\r
- env.info().log("Original Filter Method 10x10");\r
- mayUserRead_MultiPermMultiRoleExist(10,10);\r
- env.info().log("New Filter Method 10x10");\r
- mayUserRead_MultiPermMultiRoleExist_NewOK(10,10);\r
- }\r
-\r
-// @Test\r
- public void mayUserRead_MultiPermMultiRoleExist_20x10() {\r
- env.info().log("mayUserRead_MultiPermMultiRoleExist_20x10");\r
- mayUserRead_MultiPermMultiRoleExist_NewOK(20,10);\r
- }\r
-\r
-// @Test\r
- public void mayUserRead_MultiPermMultiRoleExist_100x10() {\r
- env.info().log("mayUserRead_MultiPermMultiRoleExist_100x10");\r
- mayUserRead_MultiPermMultiRoleExist_NewOK(100,10);\r
- }\r
-\r
-// @Test\r
- public void mayUserRead_MultiPermMultiRoleExist_100x20() {\r
- env.info().log("mayUserRead_MultiPermMultiRoleExist_100x20");\r
- mayUserRead_MultiPermMultiRoleExist_NewOK(100,20);\r
- }\r
-\r
-// @Test\r
- public void mayUserRead_MultiPermMultiRoleExist_1000x20() {\r
- env.info().log("mayUserRead_MultiPermMultiRoleExist_1000x20");\r
- mayUserRead_MultiPermMultiRoleExist_NewOK(1000,20);\r
- }\r
-\r
- private void mayUserRead_MultiPermMultiRoleExist(int roleLevel, int permLevel) {\r
- List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();\r
- List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();\r
- List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();\r
- load(roleLevel, permLevel, lperm,lrole,lur);\r
-\r
-\r
- Result<List<PermDAO.Data>> pres;\r
- trans.setUser(new Principal() {\r
- @Override\r
- public String getName() {\r
- return JU9999_JU_TEST_COM;\r
- }\r
- });\r
-\r
- try {\r
- TimeTaken group = trans.start(" Original Security Method (1st time)", Env.SUB);\r
- try {\r
- TimeTaken tt = trans.start(" Get User Perms for "+JU9998_JU_TEST_COM, Env.SUB);\r
- try {\r
- pres = q.getPermsByUser(trans,JU9998_JU_TEST_COM,true);\r
- } finally {\r
- tt.done();\r
- env.info().log(tt," Looked up (full) getPermsByUser for",JU9998_JU_TEST_COM);\r
- }\r
- assertTrue(pres.isOK());\r
- tt = trans.start(" q.mayUser", Env.SUB);\r
- List<PermDAO.Data> reduced = new ArrayList<PermDAO.Data>();\r
- \r
- try {\r
- for(PermDAO.Data p : pres.value) {\r
- Result<Data> r = q.mayUser(trans,JU9999_JU_TEST_COM,p,Access.read);\r
- if(r.isOK()) {\r
- reduced.add(p);\r
- }\r
- }\r
- } finally {\r
- tt.done();\r
- env.info().log(tt," reduced" + pres.value.size(),"perms","to",reduced.size());\r
- // assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);\r
- }\r
- // assertFalse(result.isOK());\r
- } finally {\r
- group.done();\r
- env.info().log(group," Original Validation Method (1st pass)");\r
- }\r
- \r
-\r
- } finally {\r
- unload(lperm, lrole, lur);\r
- }\r
- }\r
-\r
- private void mayUserRead_MultiPermMultiRoleExist_NewOK(int roleLevel, int permLevel) {\r
- List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();\r
- List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();\r
- List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();\r
- load(roleLevel, permLevel, lperm,lrole,lur);\r
-\r
- try {\r
-\r
- Result<List<PermDAO.Data>> pres;\r
- TimeTaken tt = trans.start(" mayUserRead_MultiPermMultiRoleExist_New New Filter", Env.SUB);\r
- try {\r
- pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM);\r
- } finally {\r
- tt.done();\r
- env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role", lur.size(), "UserRoles");\r
-// assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);\r
- }\r
-// assertTrue(pres.isOKhasData());\r
-\r
- tt = trans.start(" mayUserRead_MultiPermMultiRoleExist_New New Filter (2nd time)", Env.SUB);\r
- try {\r
- pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM);\r
- } finally {\r
- tt.done();\r
- env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role", lur.size(), "UserRoles");\r
- assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);\r
- }\r
-// assertTrue(pres.isOKhasData());\r
-\r
- } finally {\r
- unload(lperm, lrole, lur);\r
- }\r
- }\r
-\r
-\r
- private void load(int roleLevel, int permLevel, List<PermDAO.Data> lperm , List<RoleDAO.Data> lrole, List<UserRoleDAO.Data> lur) {\r
- RoleDAO.Data role;\r
- UserRoleDAO.Data ur;\r
- PermDAO.Data perm;\r
- \r
- int onethirdR=roleLevel/3;\r
- int twothirdR=onethirdR*2;\r
- int onethirdP=permLevel/3;\r
- int twothirdP=onethirdP*2;\r
-\r
- for(int i=0;i<roleLevel;++i) {\r
- lrole.add(role=newRole(i));\r
- if(i<onethirdR) { // one has\r
- lur.add(ur=newUserRole(role, JU9998_JU_TEST_COM, EXPIRES_IN));\r
- q.userRoleDAO.create(trans, ur);\r
- for(int j=0;j<onethirdP;++j) {\r
- lperm.add(perm=newPerm(i,j,READ,role));\r
- q.permDAO.create(trans, perm);\r
- }\r
- } else if(i<twothirdR) { // both have\r
- lur.add(ur=newUserRole(role, JU9998_JU_TEST_COM, EXPIRES_IN));\r
- q.userRoleDAO.create(trans, ur);\r
- lur.add(ur=newUserRole(role, JU9999_JU_TEST_COM, EXPIRES_IN));\r
- q.userRoleDAO.create(trans, ur);\r
- for(int j=onethirdP;j<twothirdP;++j) {\r
- lperm.add(perm=newPerm(i,j,READ,role));\r
- q.permDAO.create(trans, perm);\r
- }\r
- } else { // other has\r
- lur.add(ur=newUserRole(role, JU9999_JU_TEST_COM, EXPIRES_IN));\r
- q.userRoleDAO.create(trans, ur);\r
- for(int j=twothirdP;j<permLevel;++j) {\r
- lperm.add(perm=newPerm(i,j,READ,role));\r
- q.permDAO.create(trans, perm);\r
- }\r
- }\r
- q.roleDAO.create(trans, role);\r
- }\r
-\r
- }\r
- \r
- private void unload(List<PermDAO.Data> lperm , List<RoleDAO.Data> lrole, List<UserRoleDAO.Data> lur) {\r
- for(PermDAO.Data perm : lperm) {\r
- q.permDAO.delete(trans, perm, false);\r
- }\r
- for(RoleDAO.Data role : lrole) {\r
- q.roleDAO.delete(trans, role, false);\r
- }\r
- for(UserRoleDAO.Data ur : lur) {\r
- q.userRoleDAO.delete(trans, ur, false);\r
- }\r
-\r
- }\r
- private PermDAO.Data newPerm(int permNum, int instNum, String action, RoleDAO.Data ... grant) {\r
- PermDAO.Data pdd = new PermDAO.Data();\r
- pdd.ns=COM_TEST_JU;\r
- pdd.type="myPerm"+permNum;\r
- pdd.instance="myInstance"+instNum;\r
- pdd.action=action;\r
- for(RoleDAO.Data r : grant) {\r
- pdd.roles(true).add(r.fullName());\r
- r.perms(true).add(pdd.encode());\r
- }\r
- return pdd;\r
- }\r
-\r
- private RoleDAO.Data newRole(int roleNum, PermDAO.Data ... grant) {\r
- RoleDAO.Data rdd = new RoleDAO.Data();\r
- rdd.ns = COM_TEST_JU+roleNum;\r
- rdd.name = "myRole"+roleNum;\r
- for(PermDAO.Data p : grant) {\r
- rdd.perms(true).add(p.encode());\r
- p.roles(true).add(rdd.fullName());\r
- }\r
- return rdd;\r
- }\r
-\r
- private UserRoleDAO.Data newUserRole(RoleDAO.Data role,String user, long offset) {\r
- UserRoleDAO.Data urd = new UserRoleDAO.Data();\r
- urd.user=user;\r
- urd.role(role);\r
- urd.expires=new Date(System.currentTimeMillis()+offset);\r
- return urd;\r
- }\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.util.Date;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Timer;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cache.Cache;\r
-import org.onap.aaf.cache.Cache.Dated;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.Cached;\r
-import org.onap.aaf.dao.Cached.Getter;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-\r
-//import org.onap.aaf.dao.Cached.Refresh;\r
-import org.onap.aaf.inno.env.Trans;\r
-\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_Cached {\r
- Cached cached;\r
- @Mock\r
- CIDAO<Trans> ciDaoMock;\r
- @Mock\r
- AuthzEnv authzEnvMock;\r
- @Mock\r
- CIDAO<AuthzTrans> cidaoATMock;\r
- \r
- String name = "nameString";\r
- \r
- @Before\r
- public void setUp(){\r
- cached = new Cached(ciDaoMock, name, 0);\r
- }\r
- \r
- @Test(expected=ArithmeticException.class)\r
- public void testCachedIdx(){\r
- int Result = cached.cacheIdx("1234567890"); \r
- }\r
- \r
- @Test(expected=ArithmeticException.class)\r
- public void testInvalidate(){\r
- int Res = cached.invalidate(name);\r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testStopTimer(){\r
- cached.stopTimer();\r
- assertTrue(true);\r
- }\r
-\r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testStartRefresh(){\r
- cached.startRefresh(authzEnvMock, cidaoATMock);\r
- assertTrue(true);\r
- }\r
-// @Mock\r
-// Trans transMock;\r
-// @Mock\r
-// Getter<DAO> getterMock;\r
-// \r
-// @Test\r
-// public void testGet(){\r
-// cached.get(transMock, name, getterMock);\r
-// fail("not implemented");\r
-// }\r
-// \r
-// @SuppressWarnings("unchecked")\r
-// public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {\r
-// List<DATA> ld = null;\r
-// Result<List<DATA>> rld = null;\r
-// \r
-// int cacheIdx = cacheIdx(key);\r
-// Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);\r
-// \r
-// // Check for saved element in cache\r
-// Dated cached = map.get(key);\r
-// // Note: These Segment Timestamps are kept up to date with DB\r
-// Date dbStamp = info.get(trans, name,cacheIdx);\r
-// \r
-// // Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)\r
-// if(cached!=null && dbStamp.before(cached.timestamp)) {\r
-// ld = (List<DATA>)cached.data;\r
-// rld = Result.ok(ld);\r
-// } else {\r
-// rld = getter.get();\r
-// if(rld.isOK()) { // only store valid lists\r
-// map.put(key, new Dated(rld.value)); // successful item found gets put in cache\r
-//// } else if(rld.status == Result.ERR_Backend){\r
-//// map.remove(key);\r
-// }\r
-// }\r
-// return rld;\r
-// }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.util.ArrayList;\r
-import java.util.List;\r
-\r
-import org.junit.Assert;\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.CachedDAO;\r
-import org.onap.aaf.dao.DAO;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-\r
-import org.onap.aaf.inno.env.Trans;\r
-\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_CachedDAO {\r
- CachedDAO cachedDAO;\r
- @Mock\r
- DAO daoMock;\r
- @Mock\r
- CIDAO<Trans> ciDAOMock; \r
- int segsize=1;\r
- Object[ ] objs = new Object[2];\r
- \r
- @Before\r
- public void setUp(){\r
- objs[0] = "helo";\r
- objs[1] = "polo";\r
- cachedDAO = new CachedDAO(daoMock, ciDAOMock, segsize);\r
- }\r
- \r
- @Test\r
- public void testKeyFromObjs(){\r
- String result = cachedDAO.keyFromObjs(objs);\r
- System.out.println("value of resut " +result);\r
- assertTrue(true);\r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.io.IOException;\r
-import java.util.ArrayList;\r
-import java.util.List;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.dao.CassAccess;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-//import org.onap.aaf.dao.CassAccess.Resettable;\r
-import com.datastax.driver.core.Cluster.Builder;\r
-\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_CassAccess {\r
- CassAccess cassAccess;\r
- \r
- public static final String KEYSPACE = "authz";\r
- public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";\r
- public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";\r
- public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";\r
- public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";\r
- public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";\r
- public static final String LATITUDE = "LATITUDE";\r
- public static final String LONGITUDE = "LONGITUDE";\r
- //private static final List<Resettable> resetExceptions = new ArrayList<Resettable>();\r
- public static final String ERR_ACCESS_MSG = "Accessing Backend";\r
- private static Builder cb = null;\r
- @Mock\r
- Env envMock;\r
- String prefix=null;\r
- \r
- @Before\r
- public void setUp(){\r
- cassAccess = new CassAccess();\r
- }\r
-\r
-\r
- @Test(expected=APIException.class)\r
- public void testCluster() throws APIException, IOException {\r
- cassAccess.cluster(envMock, prefix);\r
- \r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-import org.onap.aaf.dao.Loader;\r
-import org.powermock.api.mockito.PowerMockito;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-\r
-import org.onap.aaf.inno.env.Data;\r
-import org.onap.aaf.inno.env.Trans;\r
-import org.onap.aaf.inno.env.TransStore;\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.ConsistencyLevel;\r
-\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_CassDAOImpl {\r
-\r
-public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency";\r
-public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency";\r
-\r
-CassDAOImpl cassDAOImpl;\r
-\r
-\r
-@Mock\r
-TransStore transStoreMock;\r
-@SuppressWarnings("rawtypes")\r
-Class dcMock;\r
-@SuppressWarnings("rawtypes")\r
-Loader loaderMock;\r
-Cluster clusterMock;\r
-Class<Data> classDataMock;\r
-ConsistencyLevel consistencyLevelMock;\r
-Trans transMock;\r
-\r
-@Mock\r
-AuthzTrans authzTransMock;\r
-\r
-\r
-\r
- @SuppressWarnings({ "rawtypes", "unchecked" })\r
- @Before\r
- public void setUp()\r
- {\r
- String name = "name";\r
- String keySpace = "keySpace";\r
- String table = "table";\r
- cassDAOImpl = new CassDAOImpl(transStoreMock, name, clusterMock, keySpace, classDataMock, table, consistencyLevelMock, consistencyLevelMock);\r
- }\r
-\r
- \r
- @Test \r
- public void testReadConsistency() {\r
- String table = "users";\r
- PowerMockito.when(authzTransMock.getProperty(CASS_READ_CONSISTENCY+'.'+table)).thenReturn("TWO");\r
- ConsistencyLevel consistencyLevel = cassDAOImpl.readConsistency(authzTransMock, table);\r
- System.out.println("Consistency level" + consistencyLevel.name());\r
- assertEquals("TWO", consistencyLevel.name());\r
- }\r
- \r
- @Test \r
- public void testWriteConsistency() {\r
- String table = "users";\r
- PowerMockito.when(authzTransMock.getProperty(CASS_WRITE_CONSISTENCY+'.'+table)).thenReturn(null);\r
- ConsistencyLevel consistencyLevel = cassDAOImpl.writeConsistency(authzTransMock, table);\r
- System.out.println("Consistency level" + consistencyLevel.name());\r
- assertEquals("ONE", consistencyLevel.name());\r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.powermock.api.mockito.PowerMockito;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_DAOException {\r
-DAOException daoException;\r
-\r
- //DAOException daoException = new DAOException();\r
- String message = "message";\r
- Throwable cause; \r
- @Before\r
- public void setUp(){\r
- daoException = new DAOException(); \r
- }\r
-\r
- @Test\r
- public void test(){\r
- assertTrue(true);\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import java.io.File;\r
-import java.io.FileInputStream;\r
-import java.io.IOException;\r
-import java.io.InputStream;\r
-import java.net.URL;\r
-import java.security.NoSuchAlgorithmException;\r
-import java.util.Properties;\r
-\r
-import org.junit.After;\r
-import org.junit.AfterClass;\r
-import org.junit.Before;\r
-import org.junit.BeforeClass;\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.dao.CassAccess;\r
-import org.onap.aaf.dao.CassDAOImpl;\r
-\r
-import org.onap.aaf.cadi.Hash;\r
-import org.onap.aaf.cadi.Symm;\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.Trans.Metric;\r
-import com.datastax.driver.core.Cluster;\r
-\r
-import junit.framework.Assert;\r
-\r
-/**\r
- * Do Setup of Cassandra for Cassandra JUnit Testing\r
- * \r
- *\r
- */\r
-public class AbsJUCass {\r
- protected static final String AUTHZ = "authz";\r
- protected static Cluster cluster;\r
- protected static AuthzEnv env;\r
- protected static int iterations = 0;\r
- protected static float totals=0.0f;\r
- protected static float remote = 0.0f;\r
- protected static float json = 0.0f;\r
- protected static AuthzTrans trans;\r
- protected static boolean details = true;\r
- \r
- @BeforeClass \r
- public static void startup() throws APIException, IOException {\r
- synchronized(AUTHZ) {\r
- if(env==null) {\r
- final String resource = "cadi.properties";\r
- File f = new File("etc" + resource);\r
- InputStream is=null;\r
- Properties props = new Properties();\r
- try {\r
- if(f.exists()) {\r
- is = new FileInputStream(f);\r
- } else {\r
- URL rsrc = ClassLoader.getSystemResource(resource);\r
- is = rsrc.openStream();\r
- }\r
- props.load(is);\r
- } finally {\r
- if(is==null) {\r
- env= new AuthzEnv();\r
- Assert.fail(resource + " must exist in etc dir, or in Classpath");\r
- }\r
- is.close();\r
- }\r
- env = new AuthzEnv(props);\r
- }\r
- }\r
- cluster = CassAccess.cluster(env,"LOCAL");\r
-\r
- env.info().log("Connecting to Cluster");\r
- try {\r
- cluster.connect(AUTHZ);\r
- } catch(Exception e) {\r
- cluster=null;\r
- env.error().log(e);\r
- Assert.fail("Not able to connect to DB: " + e.getLocalizedMessage());\r
- }\r
- env.info().log("Connected");\r
- \r
- // Load special data here\r
- \r
- // WebPhone\r
- env.setProperty("java.naming.provider.url","ldap://ldap.webphone.att.com:389");\r
- env.setProperty("com.sun.jndi.ldap.connect.pool","true");\r
- \r
- iterations = 0;\r
- \r
- }\r
- \r
- @AfterClass\r
- public static void shutdown() {\r
- if(cluster!=null) {\r
- cluster.close();\r
- cluster = null;\r
- }\r
- }\r
-\r
- @Before\r
- public void newTrans() {\r
- trans = env.newTrans();\r
- \r
- trans.setProperty(CassDAOImpl.USER_NAME, System.getProperty("user.name"));\r
- }\r
- \r
- @After\r
- public void auditTrail() {\r
- if(totals==0) { // "updateTotals()" was not called... just do one Trans\r
- StringBuilder sb = new StringBuilder();\r
- Metric metric = trans.auditTrail(4, sb, Env.JSON, Env.REMOTE);\r
- if(details) {\r
- env.info().log(\r
- sb,\r
- "Total time:",\r
- totals += metric.total,\r
- "JSON time: ",\r
- metric.buckets[0],\r
- "REMOTE time: ",\r
- metric.buckets[1]\r
- );\r
- } else {\r
- totals += metric.total;\r
- }\r
- }\r
- }\r
- \r
- protected void updateTotals() {\r
- Metric metric = trans.auditTrail(0, null, Env.JSON, Env.REMOTE);\r
- totals+=metric.total;\r
- json +=metric.buckets[0];\r
- remote+=metric.buckets[1];\r
- }\r
-\r
-\r
- @AfterClass\r
- public static void print() {\r
- float transTime;\r
- if(iterations==0) {\r
- transTime=totals;\r
- } else {\r
- transTime=totals/iterations;\r
- }\r
- env.info().log(\r
- "Total time:",\r
- totals, \r
- "JSON time:",\r
- json,\r
- "REMOTE time:",\r
- remote,\r
- "Iterations:",\r
- iterations,\r
- "Transaction time:",\r
- transTime\r
- );\r
- }\r
- \r
- /**\r
- * Take a User/Pass and turn into an MD5 Hashed BasicAuth\r
- * \r
- * @param user\r
- * @param pass\r
- * @return\r
- * @throws IOException\r
- * @throws NoSuchAlgorithmException\r
- */\r
- public static byte[] userPassToBytes(String user, String pass)\r
- throws IOException, NoSuchAlgorithmException {\r
- // Take the form of BasicAuth, so as to allow any character in Password\r
- // (this is an issue in 1.0)\r
- // Also, it makes it quicker to evaluate Basic Auth direct questions\r
- String ba = Symm.base64url.encode(user + ':' + pass);\r
- // Take MD5 Hash, so that data in DB can't be reversed out.\r
- return Hash.encryptMD5(ba.getBytes());\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertNotSame;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.util.Date;\r
-import java.util.List;\r
-import java.util.UUID;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.ApprovalDAO;\r
-import org.onap.aaf.dao.aaf.cass.ApprovalDAO.Data;\r
-\r
-public class JU_ApprovalDAO extends AbsJUCass {\r
- @Test\r
- public void testCRUD() throws Exception {\r
- ApprovalDAO rrDAO = new ApprovalDAO(trans, cluster, AUTHZ);\r
- ApprovalDAO.Data data = new ApprovalDAO.Data();\r
- \r
- data.ticket = UUID.randomUUID(); // normally, read from Future object\r
- data.user = "testid@test.com";\r
- data.approver = "mySuper@att.com";\r
- data.type = "supervisor";\r
- data.status = "pending";\r
- data.operation = "C";\r
- data.updated = new Date();\r
- \r
- try {\r
- // Test create\r
- rrDAO.create(trans, data);\r
- \r
- // Test Read by Ticket\r
- Result<List<ApprovalDAO.Data>> rlad;\r
- rlad = rrDAO.readByTicket(trans, data.ticket);\r
- assertTrue(rlad.isOK());\r
- assertEquals(1,rlad.value.size());\r
- compare(data,rlad.value.get(0));\r
- \r
- // Hold onto original ID for deletion, and read tests\r
- UUID id = rlad.value.get(0).id;\r
- \r
- try {\r
- // Test Read by User\r
- rlad = rrDAO.readByUser(trans, data.user);\r
- assertTrue(rlad.isOKhasData());\r
- boolean ok = false;\r
- for(ApprovalDAO.Data a : rlad.value) {\r
- if(a.id.equals(id)) {\r
- ok = true;\r
- compare(data,a);\r
- }\r
- }\r
- assertTrue(ok);\r
- \r
- // Test Read by Approver\r
- rlad = rrDAO.readByApprover(trans, data.approver);\r
- assertTrue(rlad.isOKhasData());\r
- ok = false;\r
- for(ApprovalDAO.Data a : rlad.value) {\r
- if(a.id.equals(id)) {\r
- ok = true;\r
- compare(data,a);\r
- }\r
- }\r
- assertTrue(ok);\r
- \r
- // Test Read by ID\r
- rlad = rrDAO.read(trans, id);\r
- assertTrue(rlad.isOKhasData());\r
- ok = false;\r
- for(ApprovalDAO.Data a : rlad.value) {\r
- if(a.id.equals(id)) {\r
- ok = true;\r
- compare(data,a);\r
- }\r
- }\r
- assertTrue(ok);\r
- \r
- // Test Update\r
- data.status = "approved";\r
- data.id = id;\r
- assertTrue(rrDAO.update(trans, data).isOK());\r
- \r
- rlad = rrDAO.read(trans, id);\r
- assertTrue(rlad.isOKhasData());\r
- ok = false;\r
- for(ApprovalDAO.Data a : rlad.value) {\r
- if(a.id.equals(id)) {\r
- ok = true;\r
- compare(data,a);\r
- }\r
- }\r
- assertTrue(ok);\r
-\r
- } finally {\r
- // Delete\r
- data.id = id;\r
- rrDAO.delete(trans, data, true);\r
- rlad = rrDAO.read(trans, id);\r
- assertTrue(rlad.isOK());\r
- assertTrue(rlad.isEmpty());\r
- }\r
- \r
- } finally {\r
- rrDAO.close(trans);\r
- }\r
- }\r
-\r
- private void compare(Data d1, Data d2) {\r
- assertNotSame(d1.id,d2.id);\r
- assertEquals(d1.ticket,d2.ticket);\r
- assertEquals(d1.user,d2.user);\r
- assertEquals(d1.approver,d2.approver);\r
- assertEquals(d1.type,d2.type);\r
- assertEquals(d1.status,d2.status);\r
- assertEquals(d1.operation,d2.operation);\r
- assertNotSame(d1.updated,d2.updated);\r
- }\r
-\r
- \r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.security.NoSuchAlgorithmException;\r
-import java.util.Date;\r
-import java.util.List;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.ArtiDAO;\r
-import org.onap.aaf.dao.aaf.cass.ArtiDAO.Data;\r
-\r
-/**\r
- * UserDAO unit test.\r
- * User: tp007s\r
- * Date: 7/19/13\r
- */\r
-public class JU_ArtiDAO extends AbsJUCass {\r
- @Test\r
- public void test() throws IOException, NoSuchAlgorithmException {\r
- ArtiDAO adao = new ArtiDAO(trans,cluster,"authz");\r
- try {\r
- // Create\r
- ArtiDAO.Data data = new ArtiDAO.Data();\r
- data.mechid="m55555@perturbed.att.com";\r
- data.machine="perturbed1232.att.com";\r
- data.type(false).add("file");\r
- data.type(false).add("jks");\r
- data.sponsor="Fred Flintstone";\r
- data.ca="devl";\r
- data.dir="/opt/app/aft/keys";\r
- data.appName="kumquat";\r
- data.os_user="aft";\r
- data.notify="email:myname@bogus.email.com";\r
- data.expires=new Date();\r
- \r
-// Bytification\r
- ByteBuffer bb = data.bytify();\r
- Data bdata = new ArtiDAO.Data();\r
- bdata.reconstitute(bb);\r
- checkData1(data, bdata);\r
- \r
- \r
-// DB work\r
- adao.create(trans,data);\r
- try {\r
- // Validate Read with key fields in Data\r
- Result<List<ArtiDAO.Data>> rlcd = adao.read(trans,data);\r
- assertTrue(rlcd.isOKhasData());\r
- for(ArtiDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- }\r
- \r
- // Validate Read with key fields in Data\r
- rlcd = adao.read(trans,data.mechid, data.machine);\r
- assertTrue(rlcd.isOKhasData());\r
- for(ArtiDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- }\r
- \r
- // By Machine\r
- rlcd = adao.readByMachine(trans,data.machine);\r
- assertTrue(rlcd.isOKhasData());\r
- for(ArtiDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- }\r
- \r
- // By MechID\r
- rlcd = adao.readByMechID(trans,data.mechid);\r
- assertTrue(rlcd.isOKhasData());\r
- for(ArtiDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- }\r
- \r
- // Update\r
- data.sponsor = "Wilma Flintstone";\r
- adao.update(trans,data);\r
- rlcd = adao.read(trans,data);\r
- assertTrue(rlcd.isOKhasData());\r
- for(ArtiDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- } \r
-\r
- } finally {\r
- // Always delete data, even if failure.\r
- adao.delete(trans,data, true);\r
- }\r
- } finally {\r
- adao.close(trans);\r
- }\r
-\r
- \r
- }\r
-\r
- private void checkData1(Data data, Data d) {\r
- assertEquals(data.mechid,d.mechid);\r
- assertEquals(data.machine,d.machine);\r
- assertEquals(data.type(false).size(),d.type(false).size());\r
- for(String s: data.type(false)) {\r
- assertTrue(d.type(false).contains(s));\r
- }\r
- assertEquals(data.sponsor,d.sponsor);\r
- assertEquals(data.ca,d.ca);\r
- assertEquals(data.dir,d.dir);\r
- assertEquals(data.appName,d.appName);\r
- assertEquals(data.os_user,d.os_user);\r
- assertEquals(data.notify,d.notify);\r
- assertEquals(data.expires,d.expires);\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.Date;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-\r
-public class JU_Bytification {\r
-\r
- @Test\r
- public void testNS() throws IOException {\r
- \r
- // Normal\r
- NsDAO.Data ns = new NsDAO.Data();\r
- ns.name = "com.att.<pass>";\r
- ns.type = NsType.APP.type;\r
-\r
- ByteBuffer bb = ns.bytify();\r
- \r
- NsDAO.Data nsr = new NsDAO.Data();\r
- nsr.reconstitute(bb);\r
- check(ns,nsr);\r
- \r
- // Empty admin\r
-// ns.admin(true).clear();\r
- bb = ns.bytify();\r
- nsr = new NsDAO.Data();\r
- nsr.reconstitute(bb);\r
- check(ns,nsr);\r
- \r
- // Empty responsible\r
-// ns.responsible(true).clear();\r
- bb = ns.bytify();\r
- nsr = new NsDAO.Data();\r
- nsr.reconstitute(bb);\r
- check(ns,nsr);\r
-\r
- bb = ns.bytify();\r
- nsr = new NsDAO.Data();\r
- nsr.reconstitute(bb);\r
- check(ns,nsr);\r
- }\r
- \r
- private void check(NsDAO.Data a, NsDAO.Data b) {\r
- assertEquals(a.name,b.name);\r
- assertEquals(a.type,b.type);\r
-// assertEquals(a.admin.size(),b.admin.size());\r
- \r
-// for(String s: a.admin) {\r
-// assertTrue(b.admin.contains(s));\r
-// }\r
-// \r
-// assertEquals(a.responsible.size(),b.responsible.size());\r
-// for(String s: a.responsible) {\r
-// assertTrue(b.responsible.contains(s));\r
-// }\r
- }\r
-\r
- @Test\r
- public void testRole() throws IOException {\r
- RoleDAO.Data rd1 = new RoleDAO.Data();\r
- rd1.ns = "com.att.<pass>";\r
- rd1.name = "my.role";\r
- rd1.perms(true).add("com.att.<pass>.my.Perm|myInstance|myAction");\r
- rd1.perms(true).add("com.att.<pass>.my.Perm|myInstance|myAction2");\r
-\r
- // Normal\r
- ByteBuffer bb = rd1.bytify();\r
- RoleDAO.Data rd2 = new RoleDAO.Data();\r
- rd2.reconstitute(bb);\r
- check(rd1,rd2);\r
- \r
- // Overshoot Buffer\r
- StringBuilder sb = new StringBuilder(300);\r
- sb.append("role|instance|veryLongAction...");\r
- for(int i=0;i<280;++i) {\r
- sb.append('a');\r
- }\r
- rd1.perms(true).add(sb.toString());\r
- bb = rd1.bytify();\r
- rd2 = new RoleDAO.Data();\r
- rd2.reconstitute(bb);\r
- check(rd1,rd2);\r
- \r
- // No Perms\r
- rd1.perms.clear();\r
- \r
- bb = rd1.bytify();\r
- rd2 = new RoleDAO.Data();\r
- rd2.reconstitute(bb);\r
- check(rd1,rd2);\r
- \r
- // 1000 Perms\r
- for(int i=0;i<1000;++i) {\r
- rd1.perms(true).add("com|inst|action"+ i);\r
- }\r
-\r
- bb = rd1.bytify();\r
- rd2 = new RoleDAO.Data();\r
- rd2.reconstitute(bb);\r
- check(rd1,rd2);\r
-\r
- }\r
- \r
- private void check(RoleDAO.Data a, RoleDAO.Data b) {\r
- assertEquals(a.ns,b.ns);\r
- assertEquals(a.name,b.name);\r
- \r
- assertEquals(a.perms.size(),b.perms.size());\r
- for(String s: a.perms) {\r
- assertTrue(b.perms.contains(s));\r
- }\r
- }\r
-\r
- @Test\r
- public void testPerm() throws IOException {\r
- PermDAO.Data pd1 = new PermDAO.Data();\r
- pd1.ns = "com.att.<pass>";\r
- pd1.type = "my.perm";\r
- pd1.instance = "instance";\r
- pd1.action = "read";\r
- pd1.roles(true).add("com.att.<pass>.my.Role");\r
- pd1.roles(true).add("com.att.<pass>.my.Role2");\r
-\r
- // Normal\r
- ByteBuffer bb = pd1.bytify();\r
- PermDAO.Data rd2 = new PermDAO.Data();\r
- rd2.reconstitute(bb);\r
- check(pd1,rd2);\r
- \r
- // No Perms\r
- pd1.roles.clear();\r
- \r
- bb = pd1.bytify();\r
- rd2 = new PermDAO.Data();\r
- rd2.reconstitute(bb);\r
- check(pd1,rd2);\r
- \r
- // 1000 Perms\r
- for(int i=0;i<1000;++i) {\r
- pd1.roles(true).add("com.att.<pass>.my.Role"+ i);\r
- }\r
-\r
- bb = pd1.bytify();\r
- rd2 = new PermDAO.Data();\r
- rd2.reconstitute(bb);\r
- check(pd1,rd2);\r
-\r
- }\r
- \r
- private void check(PermDAO.Data a, PermDAO.Data b) {\r
- assertEquals(a.ns,b.ns);\r
- assertEquals(a.type,b.type);\r
- assertEquals(a.instance,b.instance);\r
- assertEquals(a.action,b.action);\r
- \r
- assertEquals(a.roles.size(),b.roles.size());\r
- for(String s: a.roles) {\r
- assertTrue(b.roles.contains(s));\r
- }\r
- }\r
-\r
- @Test\r
- public void testUserRole() throws IOException {\r
- UserRoleDAO.Data urd1 = new UserRoleDAO.Data();\r
- urd1.user = "myname@abc.att.com";\r
- urd1.role("com.att.<pass>","my.role");\r
- urd1.expires = new Date();\r
-\r
- // Normal\r
- ByteBuffer bb = urd1.bytify();\r
- UserRoleDAO.Data urd2 = new UserRoleDAO.Data();\r
- urd2.reconstitute(bb);\r
- check(urd1,urd2);\r
- \r
- // A null\r
- urd1.expires = null; \r
- urd1.role = null;\r
- \r
- bb = urd1.bytify();\r
- urd2 = new UserRoleDAO.Data();\r
- urd2.reconstitute(bb);\r
- check(urd1,urd2);\r
- }\r
-\r
- private void check(UserRoleDAO.Data a, UserRoleDAO.Data b) {\r
- assertEquals(a.user,b.user);\r
- assertEquals(a.role,b.role);\r
- assertEquals(a.expires,b.expires);\r
- }\r
-\r
- \r
- @Test\r
- public void testCred() throws IOException {\r
- CredDAO.Data cd = new CredDAO.Data();\r
- cd.id = "m55555@abc.att.com";\r
- cd.ns = "com.att.abc";\r
- cd.type = 2;\r
- cd.cred = ByteBuffer.wrap(new byte[]{1,34,5,3,25,0,2,5,3,4});\r
- cd.expires = new Date();\r
-\r
- // Normal\r
- ByteBuffer bb = cd.bytify();\r
- CredDAO.Data cd2 = new CredDAO.Data();\r
- cd2.reconstitute(bb);\r
- check(cd,cd2);\r
- \r
- // nulls\r
- cd.expires = null;\r
- cd.cred = null;\r
- \r
- bb = cd.bytify();\r
- cd2 = new CredDAO.Data();\r
- cd2.reconstitute(bb);\r
- check(cd,cd2);\r
-\r
- }\r
-\r
- private void check(CredDAO.Data a, CredDAO.Data b) {\r
- assertEquals(a.id,b.id);\r
- assertEquals(a.ns,b.ns);\r
- assertEquals(a.type,b.type);\r
- if(a.cred==null) {\r
- assertEquals(a.cred,b.cred); \r
- } else {\r
- int l = a.cred.limit();\r
- assertEquals(l,b.cred.limit());\r
- for (int i=0;i<l;++i) {\r
- assertEquals(a.cred.get(),b.cred.get());\r
- }\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import java.io.IOException;\r
-import java.util.Date;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.CIDAO;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.onap.aaf.dao.aaf.cass.CacheInfoDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.util.Chrono;\r
-\r
-import junit.framework.Assert;\r
-\r
-\r
-public class JU_CacheInfoDAO extends AbsJUCass {\r
-\r
- @Test\r
- public void test() throws DAOException, APIException, IOException {\r
- CIDAO<AuthzTrans> id = new CacheInfoDAO(trans, cluster, AUTHZ);\r
- Date date = new Date();\r
- \r
- id.touch(trans, RoleDAO.TABLE,1);\r
- try {\r
- Thread.sleep(3000);\r
- } catch (InterruptedException e) {\r
- }\r
- Result<Void> rid = id.check(trans);\r
- Assert.assertEquals(rid.status,Status.OK);\r
- Date[] dates = CacheInfoDAO.info.get(RoleDAO.TABLE);\r
- if(dates.length>0 && dates[1]!=null) {\r
- System.out.println(Chrono.dateStamp(dates[1]));\r
- System.out.println(Chrono.dateStamp(date));\r
- Assert.assertTrue(Math.abs(dates[1].getTime() - date.getTime())<20000); // allow for 4 seconds, given Remote DB\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.io.IOException;\r
-import java.math.BigInteger;\r
-import java.nio.ByteBuffer;\r
-import java.security.NoSuchAlgorithmException;\r
-import java.util.List;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.CertDAO;\r
-import org.onap.aaf.dao.aaf.cass.CertDAO.Data;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-\r
-/**\r
- * UserDAO unit test.\r
- * User: tp007s\r
- * Date: 7/19/13\r
- */\r
-public class JU_CertDAO extends AbsJUCass {\r
- @Test\r
- public void test() throws IOException, NoSuchAlgorithmException, APIException {\r
- CertDAO cdao = new CertDAO(trans,cluster,"authz");\r
- try {\r
- // Create\r
- CertDAO.Data data = new CertDAO.Data();\r
- data.serial=new BigInteger("11839383");\r
- data.id = "m55555@tguard.att.com";\r
- data.x500="CN=ju_cert.dao.att.com, OU=AAF, O=\"ATT Services, Inc.\", L=Southfield, ST=Michigan, C=US";\r
- data.x509="I'm a cert";\r
- data.ca = "aaf";\r
- cdao.create(trans,data);\r
-\r
-// Bytification\r
- ByteBuffer bb = data.bytify();\r
- Data bdata = new CertDAO.Data();\r
- bdata.reconstitute(bb);\r
- checkData1(data, bdata);\r
-\r
- // Validate Read with key fields in Data\r
- Result<List<CertDAO.Data>> rlcd = cdao.read(trans,data);\r
- assertTrue(rlcd.isOKhasData());\r
- for(CertDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- }\r
-\r
- // Validate Read with key fields in Data\r
- rlcd = cdao.read(trans,data.ca,data.serial);\r
- assertTrue(rlcd.isOKhasData());\r
- for(CertDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- }\r
-\r
- // Update\r
- data.id = "m66666.tguard.att.com";\r
- cdao.update(trans,data);\r
- rlcd = cdao.read(trans,data);\r
- assertTrue(rlcd.isOKhasData());\r
- for(CertDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- } \r
- \r
- cdao.delete(trans,data, true);\r
- } finally {\r
- cdao.close(trans);\r
- }\r
-\r
- \r
- }\r
-\r
- private void checkData1(Data data, Data d) {\r
- assertEquals(data.ca,d.ca);\r
- assertEquals(data.serial,d.serial);\r
- assertEquals(data.id,d.id);\r
- assertEquals(data.x500,d.x500);\r
- assertEquals(data.x509,d.x509);\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.security.NoSuchAlgorithmException;\r
-import java.util.Date;\r
-import java.util.List;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO.Data;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-\r
-/**\r
- * UserDAO unit test.\r
- * User: tp007s\r
- * Date: 7/19/13\r
- */\r
-public class JU_CredDAO extends AbsJUCass {\r
- @Test\r
- public void test() throws IOException, NoSuchAlgorithmException, APIException {\r
- CredDAO udao = new CredDAO(trans,cluster,"authz");\r
- try {\r
- // Create\r
- CredDAO.Data data = new CredDAO.Data();\r
- data.id = "m55555@aaf.att.com";\r
- data.type = CredDAO.BASIC_AUTH;\r
- data.notes = "temp pass";\r
- data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mypass"));\r
- data.other = 12;\r
- data.expires = new Date(System.currentTimeMillis() + 60000*60*24*90);\r
- udao.create(trans,data);\r
- \r
-// Bytification\r
- ByteBuffer bb = data.bytify();\r
- Data bdata = new CredDAO.Data();\r
- bdata.reconstitute(bb);\r
- checkData1(data, bdata);\r
-\r
- // Validate Read with key fields in Data\r
- Result<List<CredDAO.Data>> rlcd = udao.read(trans,data);\r
- assertTrue(rlcd.isOKhasData());\r
- for(CredDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- }\r
- \r
- // Update\r
- data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass"));\r
- udao.update(trans,data);\r
- rlcd = udao.read(trans,data);\r
- assertTrue(rlcd.isOKhasData());\r
- for(CredDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- } \r
- \r
- udao.delete(trans,data, true);\r
- } finally {\r
- udao.close(trans);\r
- }\r
-\r
- \r
- }\r
-\r
- private void checkData1(Data data, Data d) {\r
- assertEquals(data.id,d.id);\r
- assertEquals(data.type,d.type);\r
- assertEquals(data.ns,d.ns);\r
- assertEquals(data.notes,d.notes);\r
- assertEquals(data.cred,d.cred);\r
- assertEquals(data.other,d.other);\r
- assertEquals(data.expires,d.expires);\r
- }\r
-\r
-// private String CONST_myName = "MyName";\r
-// public static final java.nio.ByteBuffer CONST_MY_CRED = get_CONST_MY_CRED();\r
-// public static final int CONST_CRED_TYPE = 11;\r
-//\r
-// public static final Date CONST_UPDATE_DATE = new Date(System.currentTimeMillis()+60000*24);\r
-// @Test\r
-// public void test() {\r
-// UserDAO ud = new UserDAO(trans, cluster,"authz");\r
-// try {\r
-// UserDAO.Data data = createPrototypeUserData();\r
-// ud.create(trans, data);\r
-//\r
-// // Validate Read with key fields in Data\r
-// for(UserDAO.Data d : ud.read(trans, data)) {\r
-// checkData1(data,d);\r
-// }\r
-//\r
-// // Validate readByName\r
-// for(UserDAO.Data d : ud.read(trans, CONST_myName)) {\r
-// checkData1(data,d);\r
-// }\r
-//\r
-// ud.delete(trans, data);\r
-// List<UserDAO.Data> d_2 = ud.read(trans, CONST_myName);\r
-//\r
-// // Validate that data was deleted\r
-// assertEquals("User should not be found after deleted", 0, d_2.size() );\r
-//\r
-// data = new UserDAO.Data();\r
-// data.name = CONST_myName;\r
-// data.cred = CONST_MY_CRED;\r
-// data.cred_type= CONST_CRED_TYPE;\r
-// data.expires = new Date(System.currentTimeMillis()+60000*24);\r
-// final Result<UserDAO.Data> user = ud.r_create(trans, data);\r
-// assertEquals("ud.createUser should work", Result.Status.OK, user.status);\r
-//\r
-// checkDataIgnoreDateDiff(data, user.value);\r
-//\r
-// // finally leave system in consistent state by deleting user again\r
-// ud.delete(trans,data);\r
-//\r
-// } catch (DAOException e) {\r
-// e.printStackTrace();\r
-// fail("Fail due to Exception");\r
-// } finally {\r
-// ud.close(trans);\r
-// }\r
-// }\r
-//\r
-// private UserDAO.Data createPrototypeUserData() {\r
-// UserDAO.Data data = new UserDAO.Data();\r
-// data.name = CONST_myName;\r
-//\r
-// data.cred_type = CONST_CRED_TYPE;\r
-// data.cred = CONST_MY_CRED;\r
-// data.expires = CONST_UPDATE_DATE;\r
-// return data;\r
-// }\r
-//\r
-// // @Test\r
-// // public void testReadByUser() throws Exception {\r
-// // // this test was done above in our super test, since it uses the same setup\r
-// // }\r
-//\r
-// @Test\r
-// public void testFunctionCreateUser() throws Exception {\r
-// String name = "roger_rabbit";\r
-// Integer credType = CONST_CRED_TYPE;\r
-// java.nio.ByteBuffer cred = CONST_MY_CRED;\r
-// final UserDAO ud = new UserDAO(trans, cluster,"authz");\r
-// final UserDAO.Data data = createPrototypeUserData();\r
-// Result<UserDAO.Data> ret = ud.r_create(trans, data);\r
-// Result<List<Data>> byUserNameLookup = ud.r_read(trans, name);\r
-// \r
-// assertEquals("sanity test w/ different username (different than other test cases) failed", name, byUserNameLookup.value.get(0).name);\r
-// assertEquals("delete roger_rabbit failed", true, ud.delete(trans, byUserNameLookup.value.get(0)));\r
-// }\r
-//\r
-// @Test\r
-// public void testLowLevelCassandraCreateData_Given_UserAlreadyPresent_ShouldPass() throws Exception {\r
-// UserDAO ud = new UserDAO(trans, cluster,"authz");\r
-//\r
-// final UserDAO.Data data = createPrototypeUserData();\r
-// final UserDAO.Data data1 = ud.create(trans, data);\r
-// final UserDAO.Data data2 = ud.create(trans, data);\r
-//\r
-// assertNotNull(data1);\r
-// assertNotNull(data2);\r
-//\r
-// assertEquals(CONST_myName, data1.name);\r
-// assertEquals(CONST_myName, data2.name);\r
-// }\r
-//\r
-// @Test\r
-// public void testCreateUser_Given_UserAlreadyPresent_ShouldFail() throws Exception {\r
-// UserDAO ud = new UserDAO(trans, cluster,"authz");\r
-//\r
-// final UserDAO.Data data = createPrototypeUserData();\r
-//\r
-// // make sure that some prev test did not leave the user in the DB\r
-// ud.delete(trans, data);\r
-//\r
-// // attempt to create same user twice !!!\r
-// \r
-// final Result<UserDAO.Data> data1 = ud.r_create(trans, data);\r
-// final Result<UserDAO.Data> data2 = ud.r_create(trans, data);\r
-//\r
-// assertNotNull(data1);\r
-// assertNotNull(data2);\r
-//\r
-// assertEquals(true, Result.Status.OK == data1.status);\r
-// assertEquals(false, Result.Status.OK == data2.status);\r
-// }\r
-//\r
-// private void checkData1(UserDAO.Data data, UserDAO.Data d) {\r
-// data.name = CONST_myName;\r
-//\r
-// data.cred_type = CONST_CRED_TYPE;\r
-// data.cred = CONST_MY_CRED;\r
-// data.expires = CONST_UPDATE_DATE;\r
-//\r
-// assertEquals(data.name, d.name);\r
-// assertEquals(data.cred_type, d.cred_type);\r
-// assertEquals(data.cred, d.cred);\r
-// assertEquals(data.expires, d.expires);\r
-//\r
-// }\r
-//\r
-// private void checkDataIgnoreDateDiff(UserDAO.Data data, UserDAO.Data d) {\r
-// data.name = CONST_myName;\r
-//\r
-// data.cred_type = CONST_CRED_TYPE;\r
-// data.cred = CONST_MY_CRED;\r
-// data.expires = CONST_UPDATE_DATE;\r
-//\r
-// assertEquals(data.name, d.name);\r
-// assertEquals(data.cred_type, d.cred_type);\r
-// assertEquals(data.cred, d.cred);\r
-// // we allow dates to be different, e.g. high level calls e.g. createUser sets the date itself.\r
-// //assertEquals(data.updated, d.updated);\r
-//\r
-// }\r
-//\r
-// /**\r
-// * Get a CONST_MY_CRED ByteBuffer, which is the java type for a cass blob.\r
-// * @return\r
-// */\r
-// private static java.nio.ByteBuffer get_CONST_MY_CRED() {\r
-// return ByteBuffer.wrap("Hello".getBytes());\r
-// }\r
-//\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.nio.ByteBuffer;\r
-import java.util.Date;\r
-import java.util.List;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.DelegateDAO;\r
-import org.onap.aaf.dao.aaf.cass.DelegateDAO.Data;\r
-\r
-\r
-public class JU_DelegateDAO extends AbsJUCass {\r
- @Test\r
- public void testCRUD() throws Exception {\r
- DelegateDAO dao = new DelegateDAO(trans, cluster, AUTHZ);\r
- DelegateDAO.Data data = new DelegateDAO.Data();\r
- data.user = "myname";\r
- data.delegate = "yourname";\r
- data.expires = new Date();\r
- \r
-// Bytification\r
- ByteBuffer bb = data.bytify();\r
- Data bdata = new DelegateDAO.Data();\r
- bdata.reconstitute(bb);\r
- compare(data, bdata);\r
-\r
- try {\r
- // Test create\r
- Result<Data> ddcr = dao.create(trans,data);\r
- assertTrue(ddcr.isOK());\r
- \r
- \r
- // Read by User\r
- Result<List<DelegateDAO.Data>> records = dao.read(trans,data.user);\r
- assertTrue(records.isOKhasData());\r
- for(DelegateDAO.Data rdata : records.value) \r
- compare(data,rdata);\r
-\r
- // Read by Delegate\r
- records = dao.readByDelegate(trans,data.delegate);\r
- assertTrue(records.isOKhasData());\r
- for(DelegateDAO.Data rdata : records.value) \r
- compare(data,rdata);\r
- \r
- // Update\r
- data.delegate = "hisname";\r
- data.expires = new Date();\r
- assertTrue(dao.update(trans, data).isOK());\r
-\r
- // Read by User\r
- records = dao.read(trans,data.user);\r
- assertTrue(records.isOKhasData());\r
- for(DelegateDAO.Data rdata : records.value) \r
- compare(data,rdata);\r
-\r
- // Read by Delegate\r
- records = dao.readByDelegate(trans,data.delegate);\r
- assertTrue(records.isOKhasData());\r
- for(DelegateDAO.Data rdata : records.value) \r
- compare(data,rdata);\r
-\r
- // Test delete\r
- dao.delete(trans,data, true);\r
- records = dao.read(trans,data.user);\r
- assertTrue(records.isEmpty());\r
- \r
- \r
- } finally {\r
- dao.close(trans);\r
- }\r
- }\r
- \r
- private void compare(Data d1, Data d2) {\r
- assertEquals(d1.user, d2.user);\r
- assertEquals(d1.delegate, d2.delegate);\r
- assertEquals(d1.expires,d2.expires);\r
- }\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.security.NoSuchAlgorithmException;\r
-import java.util.Date;\r
-import java.util.List;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO.Data;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-\r
-public class JU_FastCalling extends AbsJUCass {\r
-\r
- @Test\r
- public void test() throws IOException, NoSuchAlgorithmException, APIException {\r
- trans.setProperty("cassandra.writeConsistency.cred","ONE");\r
- \r
- CredDAO udao = new CredDAO(env.newTransNoAvg(),cluster,"authz");\r
- System.out.println("Starting calls");\r
- for(iterations=0;iterations<8;++iterations) {\r
- try {\r
- // Create\r
- CredDAO.Data data = new CredDAO.Data();\r
- data.id = "m55555@aaf.att.com";\r
- data.type = CredDAO.BASIC_AUTH;\r
- data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mypass"));\r
- data.expires = new Date(System.currentTimeMillis() + 60000*60*24*90);\r
- udao.create(trans,data);\r
- \r
- // Validate Read with key fields in Data\r
- Result<List<CredDAO.Data>> rlcd = udao.read(trans,data);\r
- assertTrue(rlcd.isOKhasData());\r
- for(CredDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- }\r
- \r
- // Update\r
- data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass"));\r
- udao.update(trans,data);\r
- rlcd = udao.read(trans,data);\r
- assertTrue(rlcd.isOKhasData());\r
- for(CredDAO.Data d : rlcd.value) {\r
- checkData1(data,d);\r
- } \r
- \r
- udao.delete(trans,data, true);\r
- } finally {\r
- updateTotals();\r
- newTrans();\r
- }\r
- }\r
-\r
- }\r
-\r
- private void checkData1(Data data, Data d) {\r
- assertEquals(data.id,d.id);\r
- assertEquals(data.type,d.type);\r
- assertEquals(data.cred,d.cred);\r
- assertEquals(data.expires,d.expires);\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertNotNull;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.nio.ByteBuffer;\r
-import java.util.List;\r
-import java.util.Random;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.HistoryDAO;\r
-\r
-public class JU_HistoryDAO extends AbsJUCass {\r
- \r
- @Test\r
- public void testCreate() throws Exception {\r
- HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ);\r
- HistoryDAO.Data data = createHistoryData();\r
- \r
- try {\r
- historyDAO.create(trans,data); \r
- Thread.sleep(200);// History Create is Async\r
- Result<List<HistoryDAO.Data>> records = historyDAO.readByUser(trans,data.user,data.yr_mon);\r
- assertTrue(records.isOKhasData());\r
- for(HistoryDAO.Data d : records.value) {\r
- assertHistory(data, d);\r
- }\r
- } finally {\r
- historyDAO.close(trans);\r
- }\r
- }\r
- \r
- @Test\r
- public void tesReadByUser() throws Exception {\r
- HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ);\r
- HistoryDAO.Data data = createHistoryData();\r
- \r
- try {\r
- historyDAO.create(trans,data);\r
- Thread.sleep(200);// History Create is Async\r
- Result<List<HistoryDAO.Data>> records = historyDAO.readByUser(trans, data.user,data.yr_mon);\r
- assertTrue(records.isOKhasData());\r
- for(HistoryDAO.Data d : records.value) {\r
- assertHistory(data, d);\r
- }\r
- } finally {\r
- historyDAO.close(trans);\r
- }\r
- }\r
- \r
-/*\r
- @Test\r
- public void readByUserAndMonth() throws Exception {\r
- HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ);\r
- HistoryDAO.Data data = createHistoryData();\r
- \r
- try {\r
- historyDAO.create(trans,data); \r
- Thread.sleep(200);// History Create is Async\r
- Result<List<HistoryDAO.Data>> records = historyDAO.readByUserAndMonth(trans,\r
- data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)),\r
- Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6)));\r
- assertTrue(records.isOKhasData());\r
- for(HistoryDAO.Data d : records.value) {\r
- assertHistory(data, d);\r
- }\r
- } finally {\r
- historyDAO.close(trans);\r
- }\r
- }\r
-*/ \r
- //TODO readadd this\r
-// @Test\r
-// public void readByUserAndDay() throws Exception {\r
-// HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ);\r
-// HistoryDAO.Data data = createHistoryData();\r
-// \r
-// try {\r
-// historyDAO.create(trans, data); \r
-// Thread.sleep(200);// History Create is Async\r
-// \r
-// String dayTime = String.valueOf(data.day_time);\r
-// String day = null;\r
-// if (dayTime.length() < 8)\r
-// day = dayTime.substring(0, 1);\r
-// else \r
-// day = dayTime.substring(0, 2);\r
-// \r
-// List<HistoryDAO.Data> records = historyDAO.readByUserBetweenDates(trans,\r
-// data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)),\r
-// Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6)),\r
-// Integer.valueOf(day), 0);\r
-// assertEquals(1,records.size());\r
-// for(HistoryDAO.Data d : records) {\r
-// assertHistory(data, d);\r
-// }\r
-// } finally {\r
-// historyDAO.close(trans);\r
-// }\r
-// }\r
- private HistoryDAO.Data createHistoryData() {\r
- HistoryDAO.Data data = HistoryDAO.newInitedData();\r
- Random random = new Random();\r
- data.user = "test" + random.nextInt();\r
- data.action = "add";\r
- data.target = "history";\r
- data.memo = "adding a row into history table";\r
-// data.detail().put("id", "test");\r
-// data.detail().put("name", "test");\r
- //String temp = "Test Blob Message";\r
- data.reconstruct = ByteBuffer.wrap("Temp Blob Message".getBytes()); \r
- return data;\r
- }\r
- \r
- private void assertHistory(HistoryDAO.Data ip, HistoryDAO.Data op) {\r
- assertEquals(ip.yr_mon, op.yr_mon); \r
-// assertEquals(ip.day_time, op.day_time); \r
- assertEquals(ip.user, op.user); \r
- assertEquals(ip.action, op.action);\r
- assertEquals(ip.target, op.target);\r
- assertEquals(ip.memo, op.memo);\r
- //TODO : have to see if third party assert utility can be used\r
-// assertTrue(CollectionUtils.isEqualCollection(ip.detail, op.detail));\r
-// for (String key : ip.detail().keySet()) {\r
-// assertNotNull(op.detail().get(key));\r
-// }\r
- assertNotNull(op.reconstruct);\r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertFalse;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.HashMap;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Map.Entry;\r
-import java.util.Set;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO.Data;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-\r
-\r
-public class JU_NsDAO extends AbsJUCass {\r
- private static final String CRM = "ju_crm";\r
- private static final String SWM = "ju_swm";\r
-\r
- @Test\r
- public void test() throws APIException, IOException {\r
- NsDAO nsd = new NsDAO(trans, cluster, AUTHZ);\r
- try {\r
- final String nsparent = "com.test";\r
- final String ns1 = nsparent +".ju_ns";\r
- final String ns2 = nsparent + ".ju_ns2";\r
- \r
- Map<String,String> oAttribs = new HashMap<String,String>();\r
- oAttribs.put(SWM, "swm_data");\r
- oAttribs.put(CRM, "crm_data");\r
- Data data = new NsDAO.Data();\r
- data.name = ns1;\r
- data.type = NsType.APP.type;\r
- data.attrib(true).putAll(oAttribs);\r
- \r
-\r
- Result<List<Data>> rdrr;\r
-\r
- // CREATE\r
- Result<Data> rdc = nsd.create(trans, data);\r
- assertTrue(rdc.isOK());\r
- \r
- try {\r
-// Bytification\r
- ByteBuffer bb = data.bytify();\r
- Data bdata = new NsDAO.Data();\r
- bdata.reconstitute(bb);\r
- compare(data, bdata);\r
-\r
- // Test READ by Object\r
- rdrr = nsd.read(trans, data);\r
- assertTrue(rdrr.isOKhasData());\r
- assertEquals(rdrr.value.size(),1);\r
- Data d = rdrr.value.get(0);\r
- assertEquals(d.name,data.name);\r
- assertEquals(d.type,data.type);\r
- attribsEqual(d.attrib(false),data.attrib(false));\r
- attribsEqual(oAttribs,data.attrib(false));\r
- \r
- // Test Read by Key\r
- rdrr = nsd.read(trans, data.name);\r
- assertTrue(rdrr.isOKhasData());\r
- assertEquals(rdrr.value.size(),1);\r
- d = rdrr.value.get(0);\r
- assertEquals(d.name,data.name);\r
- assertEquals(d.type,data.type);\r
- attribsEqual(d.attrib(false),data.attrib(false));\r
- attribsEqual(oAttribs,data.attrib(false));\r
- \r
- // Read NS by Type\r
- Result<Set<String>> rtypes = nsd.readNsByAttrib(trans, SWM);\r
- Set<String> types;\r
- if(rtypes.notOK()) {\r
- throw new IOException(rtypes.errorString());\r
- } else {\r
- types = rtypes.value;\r
- }\r
- assertEquals(1,types.size());\r
- assertEquals(true,types.contains(ns1));\r
- \r
- // Add second NS to test list of data returned\r
- Data data2 = new NsDAO.Data();\r
- data2.name = ns2;\r
- data2.type = 3; // app\r
- Result<Data> rdc2 = nsd.create(trans, data2);\r
- assertTrue(rdc2.isOK());\r
- \r
- // Interrupt - test PARENT\r
- Result<List<Data>> rdchildren = nsd.getChildren(trans, "com.test");\r
- assertTrue(rdchildren.isOKhasData());\r
- boolean child1 = false;\r
- boolean child2 = false;\r
- for(Data dchild : rdchildren.value) {\r
- if(ns1.equals(dchild.name))child1=true;\r
- if(ns2.equals(dchild.name))child2=true;\r
- }\r
- assertTrue(child1);\r
- assertTrue(child2);\r
-\r
- // FINISH DATA 2 by deleting\r
- Result<Void> rddr = nsd.delete(trans, data2, true);\r
- assertTrue(rddr.isOK());\r
-\r
- // ADD DESCRIPTION\r
- String description = "This is my test Namespace";\r
- assertFalse(description.equalsIgnoreCase(data.description));\r
- \r
- Result<Void> addDesc = nsd.addDescription(trans, data.name, description);\r
- assertTrue(addDesc.isOK());\r
- rdrr = nsd.read(trans, data);\r
- assertTrue(rdrr.isOKhasData());\r
- assertEquals(rdrr.value.size(),1);\r
- assertEquals(rdrr.value.get(0).description,description);\r
- \r
- // UPDATE\r
- String newDescription = "zz1234 Owns This Namespace Now";\r
- oAttribs.put("mso", "mso_data");\r
- data.attrib(true).put("mso", "mso_data");\r
- data.description = newDescription;\r
- Result<Void> update = nsd.update(trans, data);\r
- assertTrue(update.isOK());\r
- rdrr = nsd.read(trans, data);\r
- assertTrue(rdrr.isOKhasData());\r
- assertEquals(rdrr.value.size(),1);\r
- assertEquals(rdrr.value.get(0).description,newDescription);\r
- attribsEqual(oAttribs, rdrr.value.get(0).attrib);\r
- \r
- \r
- } catch (IOException e) {\r
- e.printStackTrace();\r
- } finally {\r
- // DELETE\r
- Result<Void> rddr = nsd.delete(trans, data, true);\r
- assertTrue(rddr.isOK());\r
- rdrr = nsd.read(trans, data);\r
- assertTrue(rdrr.isOK() && rdrr.isEmpty());\r
- assertEquals(rdrr.value.size(),0);\r
- }\r
- } finally {\r
- nsd.close(trans);\r
- }\r
- }\r
-\r
- private void compare(NsDAO.Data d, NsDAO.Data data) {\r
- assertEquals(d.name,data.name);\r
- assertEquals(d.type,data.type);\r
- attribsEqual(d.attrib(false),data.attrib(false));\r
- attribsEqual(d.attrib(false),data.attrib(false));\r
- }\r
- \r
- private void attribsEqual(Map<String,String> aa, Map<String,String> ba) {\r
- assertEquals(aa.size(),ba.size());\r
- for(Entry<String, String> es : aa.entrySet()) {\r
- assertEquals(es.getValue(),ba.get(es.getKey()));\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-\r
-import org.junit.AfterClass;\r
-import org.junit.Test;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-\r
-public class JU_NsType {\r
-\r
- @AfterClass\r
- public static void tearDownAfterClass() throws Exception {\r
- }\r
-\r
- @Test\r
- public void test() {\r
- NsType nt,nt2;\r
- String[] tests = new String[] {"DOT","ROOT","COMPANY","APP","STACKED_APP","STACK"};\r
- for(String s : tests) {\r
- nt = NsType.valueOf(s);\r
- assertEquals(s,nt.name());\r
- \r
- nt2 = NsType.fromString(s);\r
- assertEquals(nt,nt2);\r
- \r
- int t = nt.type;\r
- nt2 = NsType.fromType(t);\r
- assertEquals(nt,nt2);\r
- }\r
- \r
- nt = NsType.fromType(Integer.MIN_VALUE);\r
- assertEquals(nt,NsType.UNKNOWN);\r
- nt = NsType.fromString("Garbage");\r
- assertEquals(nt,NsType.UNKNOWN);\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static junit.framework.Assert.assertEquals;\r
-import static junit.framework.Assert.assertTrue;\r
-\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.List;\r
-import java.util.Set;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO.Data;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-\r
-/**\r
- * Test the PermissionDAO\r
- * \r
- * Utilize AbsJUCass to initialize and pre-load Cass\r
- * \r
- *\r
- */\r
-public class JU_PermDAO extends AbsJUCass{\r
-\r
- @Test\r
- public void test() throws APIException, IOException {\r
- PermDAO pd = new PermDAO(trans,cluster,"authz");\r
- try {\r
- PermDAO.Data data = new PermDAO.Data();\r
- data.ns = "com.test.ju_perm";\r
- data.type = "MyType";\r
- data.instance = "MyInstance";\r
- data.action = "MyAction";\r
- data.roles(true).add(data.ns + ".dev");\r
- \r
-\r
-\r
- // CREATE\r
- Result<Data> rpdc = pd.create(trans,data);\r
- assertTrue(rpdc.isOK());\r
-\r
- Result<List<PermDAO.Data>> rlpd;\r
- try {\r
-// Bytification\r
- ByteBuffer bb = data.bytify();\r
- Data bdata = new PermDAO.Data();\r
- bdata.reconstitute(bb);\r
- compare(data, bdata);\r
-\r
- // Validate Read with key fields in Data\r
- if((rlpd = pd.read(trans,data)).isOK())\r
- for(PermDAO.Data d : rlpd.value) {\r
- checkData1(data,d);\r
- }\r
- \r
- // Validate readByName\r
- if((rlpd = pd.readByType(trans,data.ns, data.type)).isOK())\r
- for(PermDAO.Data d : rlpd.value) {\r
- checkData1(data,d);\r
- }\r
- \r
- // Add Role\r
- RoleDAO.Data role = new RoleDAO.Data();\r
- role.ns = data.ns;\r
- role.name = "test";\r
- \r
- Result<Void> rvpd = pd.addRole(trans, data, role.fullName());\r
- assertTrue(rvpd.isOK());\r
- // Validate Read with key fields in Data\r
- if((rlpd = pd.read(trans,data)).isOK())\r
- for(PermDAO.Data d : rlpd.value) {\r
- checkData2(data,d);\r
- }\r
- \r
- // Remove Role\r
- rvpd = pd.delRole(trans, data, role.fullName());\r
- assertTrue(rvpd.isOK());\r
- if((rlpd = pd.read(trans,data)).isOK())\r
- for(PermDAO.Data d : rlpd.value) {\r
- checkData1(data,d);\r
- }\r
- \r
- // Add Child\r
- Data data2 = new Data();\r
- data2.ns = data.ns;\r
- data2.type = data.type + ".2";\r
- data2.instance = data.instance;\r
- data2.action = data.action;\r
- \r
- rpdc = pd.create(trans, data2);\r
- assertTrue(rpdc.isOK());\r
- try {\r
- rlpd = pd.readChildren(trans, data.ns,data.type);\r
- assertTrue(rlpd.isOKhasData());\r
- assertEquals(rlpd.value.size(),1);\r
- assertEquals(rlpd.value.get(0).fullType(),data2.fullType());\r
- } finally {\r
- // Delete Child\r
- pd.delete(trans, data2,true);\r
-\r
- }\r
- } catch (IOException e) {\r
- e.printStackTrace();\r
- } finally {\r
- // DELETE\r
- Result<Void> rpdd = pd.delete(trans,data,true);\r
- assertTrue(rpdd.isOK());\r
- rlpd = pd.read(trans, data);\r
- assertTrue(rlpd.isOK() && rlpd.isEmpty());\r
- assertEquals(rlpd.value.size(),0);\r
- }\r
- } finally {\r
- pd.close(trans);\r
- }\r
- }\r
-\r
- private void compare(Data a, Data b) {\r
- assertEquals(a.ns,b.ns);\r
- assertEquals(a.type,b.type);\r
- assertEquals(a.instance,b.instance);\r
- assertEquals(a.action,b.action);\r
- assertEquals(a.roles(false).size(),b.roles(false).size());\r
- for(String s: a.roles(false)) {\r
- assertTrue(b.roles(false).contains(s));\r
- }\r
- }\r
- private void checkData1(Data data, Data d) {\r
- assertEquals(data.ns,d.ns);\r
- assertEquals(data.type,d.type);\r
- assertEquals(data.instance,d.instance);\r
- assertEquals(data.action,d.action);\r
- \r
- Set<String> ss = d.roles(true);\r
- assertEquals(1,ss.size());\r
- assertTrue(ss.contains(data.ns+".dev"));\r
- }\r
- \r
- private void checkData2(Data data, Data d) {\r
- assertEquals(data.ns,d.ns);\r
- assertEquals(data.type,d.type);\r
- assertEquals(data.instance,d.instance);\r
- assertEquals(data.action,d.action);\r
- \r
- Set<String> ss = d.roles(true);\r
- assertEquals(2,ss.size());\r
- assertTrue(ss.contains(data.ns+".dev"));\r
- assertTrue(ss.contains(data.ns+".test"));\r
- }\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import static junit.framework.Assert.assertEquals;\r
-import static junit.framework.Assert.assertTrue;\r
-\r
-import java.io.IOException;\r
-import java.nio.ByteBuffer;\r
-import java.util.List;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO.Data;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-\r
-\r
-public class JU_RoleDAO extends AbsJUCass {\r
-\r
- @Test\r
- public void test() throws IOException, APIException {\r
- RoleDAO rd = new RoleDAO(trans, cluster, AUTHZ);\r
- try {\r
- Data data = new RoleDAO.Data();\r
- data.ns = "com.test.ju_role";\r
- data.name = "role1";\r
-\r
-// Bytification\r
- ByteBuffer bb = data.bytify();\r
- Data bdata = new RoleDAO.Data();\r
- bdata.reconstitute(bb);\r
- compare(data, bdata);\r
-\r
- // CREATE\r
- Result<Data> rdc = rd.create(trans, data);\r
- assertTrue(rdc.isOK());\r
- Result<List<Data>> rdrr;\r
- try {\r
- // READ\r
- rdrr = rd.read(trans, data);\r
- assertTrue(rdrr.isOKhasData());\r
- assertEquals(rdrr.value.size(),1);\r
- Data d = rdrr.value.get(0);\r
- assertEquals(d.perms.size(),0);\r
- assertEquals(d.name,data.name);\r
- assertEquals(d.ns,data.ns);\r
-\r
- PermDAO.Data perm = new PermDAO.Data();\r
- perm.ns = data.ns;\r
- perm.type = "Perm";\r
- perm.instance = "perm1";\r
- perm.action = "write";\r
- \r
- // ADD Perm\r
- Result<Void> rdar = rd.addPerm(trans, data, perm);\r
- assertTrue(rdar.isOK());\r
- rdrr = rd.read(trans, data);\r
- assertTrue(rdrr.isOKhasData());\r
- assertEquals(rdrr.value.size(),1);\r
- assertEquals(rdrr.value.get(0).perms.size(),1);\r
- assertTrue(rdrr.value.get(0).perms.contains(perm.encode()));\r
- \r
- // DEL Perm\r
- rdar = rd.delPerm(trans, data,perm);\r
- assertTrue(rdar.isOK());\r
- rdrr = rd.read(trans, data);\r
- assertTrue(rdrr.isOKhasData());\r
- assertEquals(rdrr.value.size(),1);\r
- assertEquals(rdrr.value.get(0).perms.size(),0);\r
-\r
- // Add Child\r
- Data data2 = new Data();\r
- data2.ns = data.ns;\r
- data2.name = data.name + ".2";\r
- \r
- rdc = rd.create(trans, data2);\r
- assertTrue(rdc.isOK());\r
- try {\r
- rdrr = rd.readChildren(trans, data.ns,data.name);\r
- assertTrue(rdrr.isOKhasData());\r
- assertEquals(rdrr.value.size(),1);\r
- assertEquals(rdrr.value.get(0).name,data.name + ".2");\r
- \r
- rdrr = rd.readChildren(trans, data.ns,"*");\r
- assertTrue(rdrr.isOKhasData());\r
- assertEquals(rdrr.value.size(),2);\r
-\r
- } finally {\r
- // Delete Child\r
- rd.delete(trans, data2, true);\r
- }\r
- \r
- } finally {\r
- // DELETE\r
- Result<Void> rddr = rd.delete(trans, data, true);\r
- assertTrue(rddr.isOK());\r
- rdrr = rd.read(trans, data);\r
- assertTrue(rdrr.isOK() && rdrr.isEmpty());\r
- assertEquals(rdrr.value.size(),0);\r
- }\r
- } finally {\r
- rd.close(trans);\r
- }\r
- }\r
-\r
- private void compare(Data a, Data b) {\r
- assertEquals(a.name,b.name);\r
- assertEquals(a.description, b.description);\r
- assertEquals(a.ns,b.ns);\r
- assertEquals(a.perms(false).size(),b.perms(false).size());\r
- for(String p : a.perms(false)) {\r
- assertTrue(b.perms(false).contains(p));\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.dao.aaf.test;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-\r
-import com.datastax.driver.core.Cluster;\r
-import com.datastax.driver.core.ResultSet;\r
-import com.datastax.driver.core.Row;\r
-import com.datastax.driver.core.Session;\r
-\r
-public class NS_ChildUpdate {\r
-\r
- public static void main(String[] args) {\r
- if(args.length < 3 ) {\r
- System.out.println("usage: NS_ChildUpdate machine mechid (encrypted)passwd");\r
- } else {\r
- try {\r
- AuthzEnv env = new AuthzEnv();\r
- env.setLog4JNames("log.properties","authz","authz","audit","init","trace");\r
- \r
- Cluster cluster = Cluster.builder()\r
- .addContactPoint(args[0])\r
- .withCredentials(args[1],env.decrypt(args[2], false))\r
- .build();\r
- \r
- Session session = cluster.connect("authz");\r
- try {\r
- ResultSet result = session.execute("SELECT name,parent FROM ns");\r
- int count = 0;\r
- for(Row r : result.all()) {\r
- ++count;\r
- String name = r.getString(0);\r
- String parent = r.getString(1);\r
- if(parent==null) {\r
- int idx = name.lastIndexOf('.');\r
- \r
- parent = idx>0?name.substring(0, idx):".";\r
- System.out.println("UPDATE " + name + " to " + parent);\r
- session.execute("UPDATE ns SET parent='" + parent + "' WHERE name='" + name + "';");\r
- }\r
- }\r
- System.out.println("Processed " + count + " records");\r
- } finally {\r
- session.close();\r
- cluster.close();\r
- }\r
- } catch (Exception e) {\r
- e.printStackTrace();\r
- }\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-#-------------------------------------------------------------------------------\r
-# ============LICENSE_START====================================================\r
-# * org.onap.aaf\r
-# * ===========================================================================\r
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
-# * ===========================================================================\r
-# * Licensed under the Apache License, Version 2.0 (the "License");\r
-# * you may not use this file except in compliance with the License.\r
-# * You may obtain a copy of the License at\r
-# * \r
-# * http://www.apache.org/licenses/LICENSE-2.0\r
-# * \r
-# * Unless required by applicable law or agreed to in writing, software\r
-# * distributed under the License is distributed on an "AS IS" BASIS,\r
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
-# * See the License for the specific language governing permissions and\r
-# * limitations under the License.\r
-# * ============LICENSE_END====================================================\r
-# *\r
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
-# *\r
-#-------------------------------------------------------------------------------\r
-###############################################################################\r
-# Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.\r
-###############################################################################\r
-##\r
-## AUTHZ API (authz-service) Properties\r
-##\r
-\r
-cadi_prop_file=com.att.aaf.props;com.att.aaf.common.props\r
-\r
-#cadi_trust_all_x509=true\r
-#cadi_alias=aaf.att\r
-https.protocols=TLSv1.1,TLSv1.2\r
-\r
-cm_url=https://XXX:8150\r
-\r
-basic_realm=localized\r
-basic_warn=false\r
-localhost_deny=false\r
-\r
-cass_group_name=com.att.aaf\r
-cass_cluster_name=mithrilcsp.sbc.com\r
-aaf_default_realm=com.att.csp\r
-\r
-aaf_url=https://DME2RESOLVE/service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE\r
-aaf_id=???\r
-aaf_password=enc:XXX\r
-\r
-aaf_user_expires=3000\r
-aaf_clean_interval=4000\r
-\r
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>\r
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aai\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * Copyright © 2017 Amdocs\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">\r
- <modelVersion>4.0.0</modelVersion>\r
-\r
- \r
- <parent>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>parent</artifactId>\r
- <version>1.0.1-SNAPSHOT</version>\r
- <relativePath>../pom.xml</relativePath>\r
- </parent>\r
- \r
- <!-- No Parent on Purpose!!! -->\r
- <artifactId>authz-client</artifactId>\r
- <name>Authz Client</name>\r
- <description>Client and XSD Generated code for Authz</description>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <version>1.0.1-SNAPSHOT</version>\r
- <packaging>jar</packaging>\r
- <url>https://github.com/att/AAF</url>\r
-\r
- <developers>\r
- <developer>\r
- <name>Jonathan Gathman</name>\r
- <email></email>\r
- <organization>ATT</organization>\r
- <organizationUrl></organizationUrl>\r
- </developer>\r
- </developers>\r
- \r
- <properties>\r
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\r
- <swm-distFiles-path>/opt/app/aft/${project.artifactId}/${project.version}</swm-distFiles-path>\r
- <maven.test.failure.ignore>true</maven.test.failure.ignore>\r
- <project.cadiVersion>1.0.0-SNAPSHOT</project.cadiVersion>\r
- <!-- SONAR -->\r
- <jacoco.version>0.7.7.201606060606</jacoco.version>\r
- <sonar.skip>true</sonar.skip>\r
- <sonar-jacoco-listeners.version>3.2</sonar-jacoco-listeners.version>\r
- <sonar.core.codeCoveragePlugin>jacoco</sonar.core.codeCoveragePlugin>\r
- <!-- Default Sonar configuration -->\r
- <sonar.jacoco.reportPath>target/code-coverage/jacoco-ut.exec</sonar.jacoco.reportPath>\r
- <sonar.jacoco.itReportPath>target/code-coverage/jacoco-it.exec</sonar.jacoco.itReportPath>\r
- <!-- Note: This list should match jacoco-maven-plugin's exclusion list below -->\r
- <sonar.exclusions>**/gen/**,**/generated-sources/**,**/yang-gen**,**/pax/**</sonar.exclusions>\r
- <nexusproxy>https://nexus.onap.org</nexusproxy>\r
- <snapshotNexusPath>/content/repositories/snapshots/</snapshotNexusPath>\r
- <releaseNexusPath>/content/repositories/releases/</releaseNexusPath>\r
- <stagingNexusPath>/content/repositories/staging/</stagingNexusPath>\r
- <sitePath>/content/sites/site/org/onap/aaf/authz/${project.artifactId}/${project.version}</sitePath>\r
- </properties>\r
- \r
- <dependencies>\r
- <dependency>\r
- <groupId>junit</groupId>\r
- <artifactId>junit</artifactId>\r
- <version>4.10</version>\r
- <scope>test</scope>\r
- </dependency>\r
- \r
- </dependencies>\r
-\r
- <build>\r
- <plugins>\r
- <plugin>\r
- <groupId>org.codehaus.mojo</groupId>\r
- <artifactId>jaxb2-maven-plugin</artifactId>\r
- <version>1.3</version>\r
- <executions>\r
- <execution>\r
- <phase>generate-sources</phase>\r
- <goals>\r
- <goal>xjc</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- <configuration>\r
- <schemaDirectory>src/main/xsd</schemaDirectory>\r
- </configuration>\r
- </plugin>\r
-\r
- <!--This plugin's configuration is used to store Eclipse m2e settings \r
- only. It has no influence on the Maven build itself. -->\r
- <plugin>\r
- <groupId>org.eclipse.m2e</groupId>\r
- <artifactId>lifecycle-mapping</artifactId>\r
- <version>1.0.0</version>\r
- <configuration>\r
- <lifecycleMappingMetadata>\r
- <pluginExecutions>\r
- <pluginExecution>\r
- <pluginExecutionFilter>\r
- <groupId>\r
- org.codehaus.mojo\r
- </groupId>\r
- <artifactId>\r
- jaxb2-maven-plugin\r
- </artifactId>\r
- <versionRange>\r
- [1.3,)\r
- </versionRange>\r
- <goals>\r
- <goal>xjc</goal>\r
- </goals>\r
- </pluginExecutionFilter>\r
- <action>\r
- <ignore></ignore>\r
- </action>\r
- </pluginExecution>\r
- </pluginExecutions>\r
- </lifecycleMappingMetadata>\r
- </configuration>\r
- </plugin>\r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-compiler-plugin</artifactId>\r
- <version>2.3.2</version>\r
- <configuration>\r
- <source>1.6</source>\r
- <target>1.6</target>\r
- </configuration>\r
- </plugin>\r
- \r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-javadoc-plugin</artifactId>\r
- <version>2.10.4</version>\r
- <configuration>\r
- <failOnError>false</failOnError>\r
- </configuration>\r
- <executions>\r
- <execution>\r
- <id>attach-javadocs</id>\r
- <goals>\r
- <goal>jar</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin> \r
- \r
- \r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-source-plugin</artifactId>\r
- <version>2.2.1</version>\r
- <executions>\r
- <execution>\r
- <id>attach-sources</id>\r
- <goals>\r
- <goal>jar-no-fork</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin>\r
- \r
- <plugin>\r
- <groupId>org.sonatype.plugins</groupId>\r
- <artifactId>nexus-staging-maven-plugin</artifactId>\r
- <version>1.6.7</version>\r
- <extensions>true</extensions>\r
- <configuration>\r
- <nexusUrl>${nexusproxy}</nexusUrl>\r
- <stagingProfileId>176c31dfe190a</stagingProfileId>\r
- <serverId>ecomp-staging</serverId>\r
- </configuration>\r
- </plugin> \r
- <plugin>\r
- <groupId>org.jacoco</groupId>\r
- <artifactId>jacoco-maven-plugin</artifactId>\r
- <version>${jacoco.version}</version>\r
- <configuration>\r
- <excludes>\r
- <exclude>**/gen/**</exclude>\r
- <exclude>**/generated-sources/**</exclude>\r
- <exclude>**/yang-gen/**</exclude>\r
- <exclude>**/pax/**</exclude>\r
- </excludes>\r
- </configuration>\r
- <executions>\r
-\r
- <execution>\r
- <id>pre-unit-test</id>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/code-coverage/jacoco-ut.exec</destFile>\r
- <propertyName>surefireArgLine</propertyName>\r
- </configuration>\r
- </execution>\r
- \r
- \r
- <execution>\r
- <id>post-unit-test</id>\r
- <phase>test</phase>\r
- <goals>\r
- <goal>report</goal>\r
- </goals>\r
- <configuration>\r
- <dataFile>${project.build.directory}/code-coverage/jacoco-ut.exec</dataFile>\r
- <outputDirectory>${project.reporting.outputDirectory}/jacoco-ut</outputDirectory>\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <id>pre-integration-test</id>\r
- <phase>pre-integration-test</phase>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/code-coverage/jacoco-it.exec</destFile>\r
-\r
- <propertyName>failsafeArgLine</propertyName>\r
- </configuration>\r
- </execution>\r
-\r
- \r
- <execution>\r
- <id>post-integration-test</id>\r
- <phase>post-integration-test</phase>\r
- <goals>\r
- <goal>report</goal>\r
- </goals>\r
- <configuration>\r
- <dataFile>${project.build.directory}/code-coverage/jacoco-it.exec</dataFile>\r
- <outputDirectory>${project.reporting.outputDirectory}/jacoco-it</outputDirectory>\r
- </configuration>\r
- </execution>\r
- </executions>\r
- </plugin> \r
-\r
- </plugins>\r
- </build>\r
- \r
- <distributionManagement>\r
- <repository>\r
- <id>ecomp-releases</id>\r
- <name>AAF Release Repository</name>\r
- <url>${nexusproxy}${releaseNexusPath}</url>\r
- </repository>\r
- <snapshotRepository>\r
- <id>ecomp-snapshots</id>\r
- <name>AAF Snapshot Repository</name>\r
- <url>${nexusproxy}${snapshotNexusPath}</url>\r
- </snapshotRepository>\r
- <site>\r
- <id>ecomp-site</id>\r
- <url>dav:${nexusproxy}${sitePath}</url>\r
- </site>\r
- </distributionManagement>\r
-\r
-</project>\r
-\r
+++ /dev/null
-<!-- Used by AAF (ATT inc 2013) -->
-<xs:schema
- xmlns:xs="http://www.w3.org/2001/XMLSchema"
- xmlns:aaf="urn:aaf:v2_0"
- targetNamespace="urn:aaf:v2_0"
- elementFormDefault="qualified">
-
-<!--
- Note: jan 22, 2015. Deprecating the "force" element in the "Request" Structure. Do that
- with Query Params.
-
- Eliminate in 3.0
- -->
-<!--
- Errors
- Note: This Error Structure has been made to conform to the AT&T TSS Policies
-
-
- -->
- <xs:element name="error">
- <xs:complexType>
- <xs:sequence>
- <!--
- Unique message identifier of the format ‘ABCnnnn’ where ‘ABC’ is
- either ‘SVC’ for Service Exceptions or ‘POL’ for Policy Exception.
- Exception numbers may be in the range of 0001 to 9999 where :
- * 0001 to 0199 are reserved for common exception messages
- * 0200 to 0999 are reserved for Parlay Web Services specification use
- * 1000-9999 are available for exceptions
- -->
- <xs:element name="messageId" type="xs:string" minOccurs="1" maxOccurs="1"/>
-
- <!--
- Message text, with replacement
- variables marked with %n, where n is
- an index into the list of <variables>
- elements, starting at 1
- -->
- <xs:element name="text" type="xs:string" minOccurs="1" maxOccurs="1"/>
-
- <!--
- List of zero or more strings that
- represent the contents of the variables
- used by the message text. -->
- <xs:element name="variables" type="xs:string" minOccurs="0" maxOccurs="unbounded" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Requests
- -->
- <xs:complexType name="Request">
- <xs:sequence>
- <xs:element name="start" type="xs:dateTime" minOccurs="1" maxOccurs="1" />
- <xs:element name="end" type="xs:date" minOccurs="1" maxOccurs="1"/>
- <!-- Deprecated. Use Query Command
- <xs:element name="force" type="xs:string" minOccurs="1" maxOccurs="1" default="false"/>
- -->
- </xs:sequence>
- </xs:complexType>
-
-<!--
- Keys
- -->
- <xs:element name="keys">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="key" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-
-<!--
- Permissions
--->
- <xs:complexType name = "pkey">
- <xs:sequence>
- <xs:element name="type" type="xs:string"/>
- <xs:element name="instance" type="xs:string"/>
- <xs:element name="action" type="xs:string"/>
- </xs:sequence>
- </xs:complexType>
-
- <xs:element name="permKey">
- <xs:complexType >
- <xs:complexContent>
- <xs:extension base="aaf:pkey" />
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="perm">
- <xs:complexType >
- <xs:complexContent>
- <xs:extension base="aaf:pkey">
- <xs:sequence>
- <xs:element name="roles" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="perms">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="aaf:perm" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="permRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="type" type="xs:string"/>
- <xs:element name="instance" type="xs:string"/>
- <xs:element name="action" type="xs:string"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
-
-<!--
- Roles
--->
- <xs:complexType name="rkey">
- <xs:sequence>
- <xs:element name="name" type="xs:string"/>
- </xs:sequence>
- </xs:complexType>
-
- <xs:element name="roleKey">
- <xs:complexType >
- <xs:complexContent>
- <xs:extension base="aaf:rkey" />
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="role">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:rkey">
- <xs:sequence>
- <xs:element name="perms" type="aaf:pkey" minOccurs="0" maxOccurs="unbounded"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="roles">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="aaf:role" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="roleRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="name" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <!-- Added userRole return types 9/16/2015 -->
- <xs:element name="userRole">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="user" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="role" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="expires" type="xs:date" minOccurs="1" maxOccurs="1" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <!-- Added userRoles return types 9/16/2015 -->
- <xs:element name="userRoles">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="aaf:userRole" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="userRoleRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="user" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="role" type="xs:string" minOccurs="1" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="rolePermRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="perm" type="aaf:pkey" minOccurs="1" maxOccurs="1"/>
- <xs:element name="role" type="xs:string" minOccurs="1" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
-
- <xs:element name="nsRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="name" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="admin" type="xs:string" minOccurs="1" maxOccurs="unbounded"/>
- <xs:element name="responsible" type="xs:string" minOccurs="1" maxOccurs="unbounded"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- <!-- Note: dec 11, 2015. Request-able NS Type JG -->
- <xs:element name="type" type="xs:string" minOccurs="0" maxOccurs="1"/>
-
- <!-- "scope" is deprecated and unused as of AAF 2.0.11. It will be removed in future versions
- -->
- <xs:element name="scope" type="xs:int" minOccurs="0" maxOccurs="1"/>
-
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name = "nss">
- <xs:complexType>
- <xs:sequence>
- <xs:element name = "ns" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name = "name" type = "xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name = "responsible" type = "xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name = "admin" type = "xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name = "description" type = "xs:string" minOccurs="0" maxOccurs="1"/>
- <!-- Note: Dec 16, 2015. Added description field. Verify backward compatibility. JG -->
- <xs:element name = "attrib" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name = "key" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name = "value" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Users
--->
- <xs:element name="users">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="user" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="id" type="xs:string" minOccurs="1" maxOccurs="1" />
- <!-- Changed type to dateTime, because of importance of Certs -->
- <xs:element name="expires" type="xs:dateTime" minOccurs="1" maxOccurs="1" />
- <!-- need to differentiate User Cred Types, 5/20/2015
- This Return Object is shared by multiple functions:
- Type is not returned for "UserRole", but only "Cred"
- -->
- <xs:element name="type" type="xs:int" minOccurs="0" maxOccurs="1" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Certs
- Added 5/20/2015 to support identifying Certificate based Services
- -->
- <xs:element name="certs">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="cert" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="id" type="xs:string" minOccurs="1" maxOccurs="1" />
- <xs:element name="x500" type="xs:string" minOccurs="1" maxOccurs="1" />
- <xs:element name="expires" type="xs:dateTime" minOccurs="1" maxOccurs="1" />
- <xs:element name="fingerprint" type="xs:hexBinary" minOccurs="1" maxOccurs="1" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Credentials
--->
- <xs:element name="credRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="id" type="xs:string"/>
- <xs:element name="type" type="xs:int" minOccurs="0" maxOccurs="1"/>
- <xs:choice >
- <xs:element name="password" type="xs:string" />
- <xs:element name="entry" type="xs:string" />
- </xs:choice>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
-<!--
- History
- -->
- <xs:element name="history">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="item" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="YYYYMM" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="timestamp" type="xs:dateTime" minOccurs="1" maxOccurs="1"/>
- <xs:element name="subject" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="target" type = "xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="action" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="memo" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="user" type="xs:string" minOccurs="1" maxOccurs="1"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Approvals
- -->
- <xs:complexType name="approval">
- <xs:sequence>
- <!-- Note, id is set by system -->
- <xs:element name="id" type="xs:string" minOccurs="0" maxOccurs="1"/>
- <xs:element name="ticket" type="xs:string"/>
- <xs:element name="user" type="xs:string"/>
- <xs:element name="approver" type="xs:string"/>
- <xs:element name="type" type="xs:string"/>
- <xs:element name="memo" type="xs:string"/>
- <xs:element name="updated" type="xs:dateTime"/>
- <xs:element name="status">
- <xs:simpleType>
- <xs:restriction base="xs:string">
- <xs:enumeration value="approve"/>
- <xs:enumeration value="reject"/>
- <xs:enumeration value="pending"/>
- </xs:restriction>
- </xs:simpleType>
- </xs:element>
- <xs:element name="operation">
- <xs:simpleType>
- <xs:restriction base="xs:string">
- <xs:enumeration value="C"/>
- <xs:enumeration value="U"/>
- <xs:enumeration value="D"/>
- <xs:enumeration value="G"/>
- <xs:enumeration value="UG"/>
- </xs:restriction>
- </xs:simpleType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- <xs:element name="approvals">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="approvals" type="aaf:approval" minOccurs="1" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Delegates
--->
- <xs:complexType name="delg">
- <xs:sequence>
- <xs:element name="user" type="xs:string"/>
- <xs:element name="delegate" type="xs:string"/>
- <xs:element name="expires" type="xs:date"/>
- </xs:sequence>
- </xs:complexType>
-
- <xs:element name="delgRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="user" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="delegate" type="xs:string" minOccurs="1" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="delgs">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="delgs" type="aaf:delg" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <!-- jg 3/11/2015 New for 2.0.8 -->
- <xs:element name="api">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="route" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="meth" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="path" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="param" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="desc" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="comments" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="contentType" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="expected" type="xs:int" minOccurs="1" maxOccurs="1"/>
- <xs:element name="explicitErr" type="xs:int" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-</xs:schema>
+++ /dev/null
-<!-- Used by AAF (ATT inc 2016) -->
-<xs:schema
- xmlns:xs="http://www.w3.org/2001/XMLSchema"
- xmlns:certman="urn:certman:v1_0"
- targetNamespace="urn:certman:v1_0"
- elementFormDefault="qualified">
-
- <!-- jg 4/21/2016 New for Certificate Info -->
- <xs:element name="certInfo">
- <xs:complexType>
- <xs:sequence>
- <!-- Base64 Encoded Private Key -->
- <xs:element name="privatekey" type="xs:string" minOccurs="0" maxOccurs="1"/>
- <!-- Base64 Encoded Certificate -->
- <xs:element name="certs" type="xs:string" minOccurs="1" maxOccurs="unbounded"/>
- <!-- Challenge Password (2 method Auth) -->
- <xs:element name="challenge" type="xs:string" minOccurs="0" maxOccurs="1"/>
- <!-- Notes from Server concerning Cert (not an error) -->
- <xs:element name="notes" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:complexType name="baseRequest">
- <xs:sequence>
- <xs:element name="mechid" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <!-- Sponsor is only required if the caller is not Sponsor. In that case, the calling ID must be delegated to do the work. -->
- <xs:element name="sponsor" type="xs:string" minOccurs="0" maxOccurs="1"/>
- <xs:element name="start" type="xs:dateTime" minOccurs="1" maxOccurs="1" />
- <xs:element name="end" type="xs:date" minOccurs="1" maxOccurs="1"/>
- </xs:sequence>
- </xs:complexType>
-
- <xs:complexType name="specificRequest">
- <xs:complexContent>
- <xs:extension base="certman:baseRequest">
- <xs:sequence>
- <xs:element name="serial" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <!-- Certificate has been compromised or other security issue -->
- <xs:element name="revoke" type="xs:boolean" minOccurs="0" maxOccurs="1" default="false"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
-
- <xs:element name="certificateRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="certman:baseRequest">
- <xs:sequence>
- <!-- One FQDN is required. Multiple driven by Policy -->
- <xs:element name="fqdns" type="xs:string" minOccurs="1" maxOccurs="unbounded"/>
- <!-- Optional Email for getting Public Certificate -->
- <xs:element name="email" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="certificateRenew">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="certman:specificRequest">
- <xs:sequence>
- <!-- One FQDN is required. Multiple driven by Policy -->
- <xs:element name="fqdns" type="xs:string" minOccurs="1" maxOccurs="unbounded"/>
- <!-- Challenge Password (for accessing manually) TODO Is it necessary? -->
- <xs:element name="challenge" type="xs:string" minOccurs="0" maxOccurs="1"/>
- <!-- Optional Email for getting Public Certificate -->
- <xs:element name="email" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="certificateDrop">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="certman:specificRequest">
- <xs:sequence>
- <!-- Challenge Password (for accessing manually) TODO Is it necessary? -->
- <xs:element name="challenge" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <!-- Placement Structures -->
-
- <xs:element name="artifacts">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="artifact" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="mechid" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="machine" type="xs:string" minOccurs="0" maxOccurs="1" />
- <xs:element name="type" minOccurs="1" maxOccurs="3">
- <xs:simpleType>
- <xs:restriction base="xs:string">
- <xs:enumeration value="file"/>
- <xs:enumeration value="jks"/>
- <xs:enumeration value="print"/>
- </xs:restriction>
- </xs:simpleType>
- </xs:element>
- <xs:element name="ca" type="xs:string" minOccurs="1" maxOccurs="1" />
- <xs:element name="dir" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="os_user" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <!-- Ignored on input, and set by TABLES. However, returned on output -->
- <xs:element name="sponsor" type="xs:string" minOccurs="0" maxOccurs="1" />
- <!-- Optional... if empty, will use MechID Namespace -->
- <xs:element name="appName" type="xs:string" minOccurs="0" maxOccurs="1"/>
- <!-- Optional... if empty, will notify Sponsor -->
- <xs:element name="notification" type="xs:string" minOccurs="0" maxOccurs="1"/>
- <!-- Optional... Days before auto renewal. Min is 10. Max is 1/3 expiration (60) -->
- <xs:element name="renewDays" type="xs:int" minOccurs="0" maxOccurs="1" default="30"/>
-
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-
-
-</xs:schema>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>\r
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aaf\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">\r
- <modelVersion>4.0.0</modelVersion>\r
- <parent>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>parent</artifactId>\r
- <version>1.0.1-SNAPSHOT</version>\r
- <relativePath>../pom.xml</relativePath>\r
- </parent>\r
- \r
- <artifactId>authz-fs</artifactId>\r
- <name>Authz File Server</name>\r
- <description>Independent FileServer via HTTP (not S) for Public Files (i.e. CRLs)</description>\r
- <url>https://github.com/att/AAF</url>\r
-\r
- <developers>\r
- <developer>\r
- <name>Jonathan Gathman</name>\r
- <email></email>\r
- <organization>ATT</organization>\r
- <organizationUrl></organizationUrl>\r
- </developer>\r
- </developers>\r
-\r
- <properties>\r
- <maven.test.failure.ignore>true</maven.test.failure.ignore>\r
- <project.swmVersion>9</project.swmVersion>\r
- <project.cadiVersion>1.0.0-SNAPSHOT</project.cadiVersion>\r
- <project.innoVersion>1.0.0-SNAPSHOT</project.innoVersion>\r
- <sonar.language>java</sonar.language>\r
- <sonar.skip>true</sonar.skip>\r
- <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>\r
- <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports</sonar.surefire.reportsPath>\r
- <sonar.jacoco.reportPath>${project.build.directory}/coverage-reports/jacoco.exec</sonar.jacoco.reportPath>\r
- <sonar.jacoco.itReportPath>${project.build.directory}/coverage-reports/jacoco-it.exec</sonar.jacoco.itReportPath>\r
- <sonar.jacoco.reportMissing.force.zero>true</sonar.jacoco.reportMissing.force.zero>\r
- <sonar.projectVersion>${project.version}</sonar.projectVersion>\r
- <nexusproxy>https://nexus.onap.org</nexusproxy>\r
- <snapshotNexusPath>/content/repositories/snapshots/</snapshotNexusPath>\r
- <releaseNexusPath>/content/repositories/releases/</releaseNexusPath>\r
- <stagingNexusPath>/content/repositories/staging/</stagingNexusPath>\r
- <sitePath>/content/sites/site/org/onap/aaf/authz/${project.artifactId}/${project.version}</sitePath>\r
- </properties>\r
- \r
- \r
- <dependencies>\r
- <dependency>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>authz-core</artifactId>\r
- <version>${project.version}</version>\r
- </dependency>\r
- <dependency> \r
- <groupId>org.onap.aaf.cadi</groupId>\r
- <artifactId>cadi-core</artifactId>\r
- <version>${project.cadiVersion}</version>\r
- </dependency>\r
- <dependency>\r
- <groupId>com.att.aft</groupId>\r
- <artifactId>dme2</artifactId>\r
- </dependency>\r
- </dependencies>\r
- \r
- <build>\r
- <plugins>\r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-jar-plugin</artifactId>\r
- <configuration>\r
- <includes>\r
- <include>**/*.class</include>\r
- </includes>\r
- </configuration>\r
- <version>2.3.1</version>\r
- </plugin>\r
- \r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-javadoc-plugin</artifactId>\r
- <version>2.10.4</version>\r
- <configuration>\r
- <failOnError>false</failOnError>\r
- </configuration>\r
- <executions>\r
- <execution>\r
- <id>attach-javadocs</id>\r
- <goals>\r
- <goal>jar</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin> \r
- \r
- \r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-source-plugin</artifactId>\r
- <version>2.2.1</version>\r
- <executions>\r
- <execution>\r
- <id>attach-sources</id>\r
- <goals>\r
- <goal>jar-no-fork</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin>\r
- <plugin>\r
- <groupId>org.sonatype.plugins</groupId>\r
- <artifactId>nexus-staging-maven-plugin</artifactId>\r
- <version>1.6.7</version>\r
- <extensions>true</extensions>\r
- <configuration>\r
- <nexusUrl>${nexusproxy}</nexusUrl>\r
- <stagingProfileId>176c31dfe190a</stagingProfileId>\r
- <serverId>ecomp-staging</serverId>\r
- </configuration>\r
- </plugin> \r
- <plugin>\r
- <groupId>org.jacoco</groupId>\r
- <artifactId>jacoco-maven-plugin</artifactId>\r
- <version>0.7.7.201606060606</version>\r
- <configuration>\r
- <dumpOnExit>true</dumpOnExit>\r
- <includes>\r
- <include>org.onap.aaf.*</include>\r
- </includes>\r
- </configuration>\r
- <executions>\r
- <execution>\r
- <id>pre-unit-test</id>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/coverage-reports/jacoco.exec</destFile>\r
- <!-- <append>true</append> -->\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <id>pre-integration-test</id>\r
- <phase>pre-integration-test</phase>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/coverage-reports/jacoco-it.exec</destFile>\r
- <!-- <append>true</append> -->\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <goals>\r
- <goal>merge</goal>\r
- </goals>\r
- <phase>post-integration-test</phase>\r
- <configuration>\r
- <fileSets>\r
- <fileSet implementation="org.apache.maven.shared.model.fileset.FileSet">\r
- <directory>${project.build.directory}/coverage-reports</directory>\r
- <includes>\r
- <include>*.exec</include>\r
- </includes>\r
- </fileSet>\r
- </fileSets>\r
- <destFile>${project.build.directory}/jacoco-dev.exec</destFile>\r
- </configuration>\r
- </execution>\r
- </executions>\r
- </plugin> \r
-\r
- </plugins>\r
-\r
- </build>\r
-<distributionManagement>\r
- <repository>\r
- <id>ecomp-releases</id>\r
- <name>AAF Release Repository</name>\r
- <url>${nexusproxy}${releaseNexusPath}</url>\r
- </repository>\r
- <snapshotRepository>\r
- <id>ecomp-snapshots</id>\r
- <name>AAF Snapshot Repository</name>\r
- <url>${nexusproxy}${snapshotNexusPath}</url>\r
- </snapshotRepository>\r
- <site>\r
- <id>ecomp-site</id>\r
- <url>dav:${nexusproxy}${sitePath}</url>\r
- </site>\r
- </distributionManagement>\r
-\r
-</project>\r
+++ /dev/null
-##
-## AUTHZ API (authz-service) Properties
-##
-
-hostname=_HOSTNAME_
-
-## DISCOVERY (DME2) Parameters on the Command Line
-AFT_LATITUDE=_AFT_LATITUDE_
-AFT_LONGITUDE=_AFT_LONGITUDE_
-AFT_ENVIRONMENT=_AFT_ENVIRONMENT_
-DEPLOYED_VERSION=_ARTIFACT_VERSION_
-
-DMEServiceName=service=com.att.authz.authz-fs/version=_MAJOR_VER_._MINOR_VER_._PATCH_VER_/envContext=_ENV_CONTEXT_/routeOffer=_ROUTE_OFFER_
-AFT_DME2_PORT_RANGE=_AUTHZ_FS_PORT_RANGE_
-AFT_DME2_SSL_ENABLE=false
-AFT_DME2_DISABLE_PERSISTENT_CACHE=true
-
-CFA_WebPath=_ROOT_DIR_/data
-CFA_ClearCommand=FmzYPpMY918MwE1hyacoiFSt
-CFA_MaxSize=2000000
\ No newline at end of file
+++ /dev/null
-#-------------------------------------------------------------------------------\r
-# ============LICENSE_START====================================================\r
-# * org.onap.aaf\r
-# * ===========================================================================\r
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
-# * ===========================================================================\r
-# * Licensed under the Apache License, Version 2.0 (the "License");\r
-# * you may not use this file except in compliance with the License.\r
-# * You may obtain a copy of the License at\r
-# * \r
-# * http://www.apache.org/licenses/LICENSE-2.0\r
-# * \r
-# * Unless required by applicable law or agreed to in writing, software\r
-# * distributed under the License is distributed on an "AS IS" BASIS,\r
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
-# * See the License for the specific language governing permissions and\r
-# * limitations under the License.\r
-# * ============LICENSE_END====================================================\r
-# *\r
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
-# *\r
-#-------------------------------------------------------------------------------\r
-###############################################################################\r
-# Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.\r
-###############################################################################\r
-#\r
-# Licensed to the Apache Software Foundation (ASF) under one\r
-# or more contributor license agreements. See the NOTICE file\r
-# distributed with this work for additional information\r
-# regarding copyright ownership. The ASF licenses this file\r
-# to you under the Apache License, Version 2.0 (the\r
-# "License"); you may not use this file except in compliance\r
-# with the License. You may obtain a copy of the License at\r
-#\r
-# http://www.apache.org/licenses/LICENSE-2.0\r
-#\r
-# Unless required by applicable law or agreed to in writing,\r
-# software distributed under the License is distributed on an\r
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r
-# KIND, either express or implied. See the License for the\r
-# specific language governing permissions and limitations\r
-# under the License.\r
-#\r
-log4j.appender.INIT=org.apache.log4j.DailyRollingFileAppender \r
-log4j.appender.INIT.File=_LOG_DIR_/${LOG4J_FILENAME_init}\r
-log4j.appender.INIT.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.INIT.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.INIT.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.INIT.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.INIT.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %m %n\r
-\r
-\r
-log4j.appender.FS=org.apache.log4j.DailyRollingFileAppender \r
-log4j.appender.FS.File=logs/${LOG4J_FILENAME_authz}\r
-log4j.appender.FS.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.FS.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.FS.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.FS.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.FS.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %p [%c] %m %n\r
-\r
-log4j.appender.AUDIT=org.apache.log4j.DailyRollingFileAppender\r
-log4j.appender.AUDIT.File=_LOG_DIR_/${LOG4J_FILENAME_audit}\r
-log4j.appender.AUDIT.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.AUDIT.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.AUDIT.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.AUDIT.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.AUDIT.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %m %n\r
-\r
-log4j.appender.TRACE=org.apache.log4j.DailyRollingFileAppender\r
-log4j.appender.TRACE.File=logs/${LOG4J_FILENAME_trace}\r
-log4j.appender.TRACE.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.TRACE.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.TRACE.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.TRACE.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.TRACE.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %m %n\r
-\r
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender\r
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] %m %n\r
-\r
-# General Apache libraries\r
-log4j.rootLogger=WARN\r
-log4j.logger.org.apache=WARN,INIT\r
-log4j.logger.dme2=WARN,INIT\r
-log4j.logger.init=INFO,INIT\r
-log4j.logger.authz=_LOG4J_LEVEL_,FS\r
-log4j.logger.audit=INFO,AUDIT\r
-log4j.logger.trace=TRACE,TRACE\r
-\r
-\r
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\r
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aaf\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<ns2:ManagedResourceList xmlns:ns2="http://scld.att.com/lrm/util" xmlns="http://scld.att.com/lrm/commontypes" xmlns:ns3="http://scld.att.com/lrm/types">\r
- <ns2:ManagedResource>\r
- <ResourceDescriptor>\r
- <ResourceName>com.att.authz._ARTIFACT_ID_</ResourceName>\r
- <ResourceVersion>\r
- <Major>_MAJOR_VER_</Major>\r
- <Minor>_MINOR_VER_</Minor>\r
- <Patch>_PATCH_VER_</Patch> \r
- </ResourceVersion>\r
- <RouteOffer>_ROUTE_OFFER_</RouteOffer>\r
- </ResourceDescriptor>\r
- <ResourceType>Java</ResourceType>\r
- <ResourcePath>com.att.authz.fs.FileServer</ResourcePath>\r
- <ResourceProps>\r
- <Tag>process.workdir</Tag>\r
- <Value>_ROOT_DIR_</Value>\r
- </ResourceProps> \r
- <ResourceProps>\r
- <Tag>jvm.version</Tag>\r
- <Value>1.8</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.args</Tag>\r
- <Value>-DAFT_LATITUDE=_AFT_LATITUDE_ -DAFT_LONGITUDE=_AFT_LONGITUDE_ -DAFT_ENVIRONMENT=_AFT_ENVIRONMENT_ -Dplatform=_SCLD_PLATFORM_ -Dcom.sun.jndi.ldap.connect.pool.maxsize=20 -Dcom.sun.jndi.ldap.connect.pool.prefsize=10 -Dcom.sun.jndi.ldap.connect.pool.timeout=3000 </Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.classpath</Tag>\r
- <Value>_ROOT_DIR_/etc:_ROOT_DIR_/lib/*:</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.heap.min</Tag>\r
- <Value>1024m</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.heap.max</Tag>\r
- <Value>2048m</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>start.class</Tag>\r
- <Value>com.att.authz.fs.FileServer</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>stdout.redirect</Tag>\r
- <Value>_ROOT_DIR_/logs/SystemOut.log</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>stderr.redirect</Tag>\r
- <Value>_ROOT_DIR_/logs/SystemErr.log</Value>\r
- </ResourceProps>\r
- <ResourceOSID>aft</ResourceOSID>\r
- <ResourceStartType>AUTO</ResourceStartType>\r
- <ResourceStartPriority>2</ResourceStartPriority>\r
- <ResourceMinCount>_RESOURCE_MIN_COUNT_</ResourceMinCount>\r
- <ResourceMaxCount>_RESOURCE_MAX_COUNT_</ResourceMaxCount> \r
- <ResourceRegistration>_RESOURCE_REGISTRATION_</ResourceRegistration>\r
- <ResourceSWMComponent>com.att.authz:_ARTIFACT_ID_</ResourceSWMComponent>\r
- <ResourceSWMComponentVersion>_ARTIFACT_VERSION_</ResourceSWMComponentVersion>\r
- </ns2:ManagedResource>\r
-</ns2:ManagedResourceList>\r
+++ /dev/null
-#-------------------------------------------------------------------------------\r
-# ============LICENSE_START====================================================\r
-# * org.onap.aaf\r
-# * ===========================================================================\r
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
-# * ===========================================================================\r
-# * Licensed under the Apache License, Version 2.0 (the "License");\r
-# * you may not use this file except in compliance with the License.\r
-# * You may obtain a copy of the License at\r
-# * \r
-# * http://www.apache.org/licenses/LICENSE-2.0\r
-# * \r
-# * Unless required by applicable law or agreed to in writing, software\r
-# * distributed under the License is distributed on an "AS IS" BASIS,\r
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
-# * See the License for the specific language governing permissions and\r
-# * limitations under the License.\r
-# * ============LICENSE_END====================================================\r
-# *\r
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
-# *\r
-#-------------------------------------------------------------------------------\r
-<html>\r
- <head> <!-- begin head -->\r
- <meta charset="utf-8">\r
- <title>AT&T Authentication/Authorization Tool</title>\r
- <!-- \r
- <link rel="stylesheet" href="_AUTHZ_GUI_URL_/theme/aaf5.css">\r
- <script type="text/javascript" src="_AUTHZ_GUI_URL_/theme/comm.js"></script>\r
- <script type="text/javascript" src="_AUTHZ_GUI_URL_/theme/console.js"></script>\r
- <script type="text/javascript" src="_AUTHZ_GUI_URL_/theme/common.js"></script>\r
- <link rel="stylesheet" href="_AUTHZ_GUI_URL_/theme/aaf5Desktop.css">\r
- -->\r
- </head> <!-- end head -->\r
- <body> <!-- begin body -->\r
- <header> <!-- begin header -->\r
- <h1>AT&T Auth Tool on _ENV_CONTEXT_</h1>\r
- <p id="version">AAF Version: _ARTIFACT_VERSION_</p>\r
- </header>\r
- <h1>Success for File Server Access</h1>\r
- </body>\r
-</html>\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.fs;\r
-\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-\r
-import java.io.IOException;\r
-import java.io.InputStream;\r
-import java.net.URL;\r
-import java.util.ArrayList;\r
-import java.util.EnumSet;\r
-import java.util.List;\r
-import java.util.Properties;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.env.AuthzTransOnlyFilter;\r
-import org.onap.aaf.cssa.rserv.CachingFileAccess;\r
-import org.onap.aaf.cssa.rserv.RServlet;\r
-\r
-import com.att.aft.dme2.api.DME2Manager;\r
-import com.att.aft.dme2.api.DME2Server;\r
-import com.att.aft.dme2.api.DME2ServerProperties;\r
-import com.att.aft.dme2.api.DME2ServiceHolder;\r
-import com.att.aft.dme2.api.util.DME2FilterHolder;\r
-import com.att.aft.dme2.api.util.DME2FilterHolder.RequestDispatcherType;\r
-import com.att.aft.dme2.api.util.DME2ServletHolder;\r
-import org.onap.aaf.inno.env.APIException;\r
-\r
-\r
-public class FileServer extends RServlet<AuthzTrans> {\r
- public FileServer(final AuthzEnv env) throws APIException, IOException {\r
- try {\r
- /////////////////////// \r
- // File Server \r
- ///////////////////////\r
- \r
- CachingFileAccess<AuthzTrans> cfa = new CachingFileAccess<AuthzTrans>(env);\r
- route(env,GET,"/:key", cfa); \r
- route(env,GET,"/:key/:cmd", cfa); \r
- ///////////////////////\r
- \r
- \r
- } catch (Exception e) {\r
- e.printStackTrace();\r
- }\r
- }\r
- \r
- public static void main(String[] args) {\r
- try {\r
- // Load Properties from authFramework.properties. Needed for DME2 and AuthzEnv\r
- Properties props = new Properties();\r
- URL rsrc = ClassLoader.getSystemResource("FileServer.props");\r
- if(rsrc==null) {\r
- System.err.println("Folder containing FileServer.props must be on Classpath");\r
- System.exit(1);\r
- }\r
- InputStream is = rsrc.openStream();\r
- try {\r
- props.load(is);\r
- } finally {\r
- is.close();\r
- }\r
- \r
- // Load Properties into AuthzEnv\r
- AuthzEnv env = new AuthzEnv(props); \r
- env.setLog4JNames("log4j.properties","authz","fs","audit","init",null);\r
- \r
- // AFT Discovery Libraries only read System Props\r
- env.loadToSystemPropsStartsWith("AFT_","DME2_");\r
- env.init().log("DME2 using " + env.getProperty("DMEServiceName","unknown") + " URI");\r
- \r
- // Start DME2 (DME2 needs Properties form of props)\r
- DME2Manager dme2 = new DME2Manager("RServDME2Manager",props);\r
- \r
- DME2ServiceHolder svcHolder;\r
- List<DME2ServletHolder> slist = new ArrayList<DME2ServletHolder>();\r
- svcHolder = new DME2ServiceHolder();\r
- String serviceName = env.getProperty("DMEServiceName",null);\r
- if(serviceName!=null) {\r
- svcHolder.setServiceURI(serviceName);\r
- svcHolder.setManager(dme2);\r
- svcHolder.setContext("/");\r
- \r
- FileServer fs = new FileServer(env);\r
- DME2ServletHolder srvHolder = new DME2ServletHolder(fs);\r
- srvHolder.setContextPath("/*");\r
- slist.add(srvHolder);\r
- \r
- EnumSet<RequestDispatcherType> edlist = EnumSet.of(\r
- RequestDispatcherType.REQUEST,\r
- RequestDispatcherType.FORWARD,\r
- RequestDispatcherType.ASYNC\r
- );\r
-\r
- ///////////////////////\r
- // Apply Filters\r
- ///////////////////////\r
- List<DME2FilterHolder> flist = new ArrayList<DME2FilterHolder>();\r
- \r
- // Need TransFilter\r
- flist.add(new DME2FilterHolder(new AuthzTransOnlyFilter(env),"/*",edlist));\r
- svcHolder.setFilters(flist);\r
- svcHolder.setServletHolders(slist);\r
- \r
- DME2Server dme2svr = dme2.getServer();\r
- DME2ServerProperties dsprops = dme2svr.getServerProperties();\r
- dsprops.setGracefulShutdownTimeMs(1000);\r
-\r
- env.init().log("Starting AAF FileServer with Jetty/DME2 server...");\r
- dme2svr.start();\r
- try {\r
-// if(env.getProperty("NO_REGISTER",null)!=null)\r
- dme2.bindService(svcHolder);\r
- env.init().log("DME2 is available as HTTP"+(dsprops.isSslEnable()?"/S":""),"on port:",dsprops.getPort());\r
-\r
- while(true) { // Per DME2 Examples...\r
- Thread.sleep(5000);\r
- }\r
- } catch(InterruptedException e) {\r
- env.init().log("AAF Jetty Server interrupted!");\r
- } catch(Exception e) { // Error binding service doesn't seem to stop DME2 or Process\r
- env.init().log(e,"DME2 Initialization Error");\r
- dme2svr.stop();\r
- System.exit(1);\r
- }\r
- } else {\r
- env.init().log("Properties must contain DMEServiceName");\r
- }\r
-\r
- } catch (Exception e) {\r
- e.printStackTrace(System.err);\r
- System.exit(1);\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.fs;\r
-\r
-import static org.junit.Assert.*;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-\r
-import java.io.File;\r
-import java.io.IOException;\r
-import java.net.URL;\r
-import java.util.Properties;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.InjectMocks;\r
-import org.mockito.Matchers;\r
-import org.mockito.Mock;\r
-import org.mockito.Mockito;\r
-import org.mockito.runners.MockitoJUnitRunner;\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.fs.*;\r
-import org.onap.aaf.cssa.rserv.CachingFileAccess;\r
-import org.powermock.api.mockito.PowerMockito;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-\r
-@RunWith(MockitoJUnitRunner.class)\r
-public class JU_FileServer { \r
- @Mock\r
- AuthzEnv authzEnvMock;\r
- AuthzEnv authzEnv = new AuthzEnv();\r
- \r
- @Before\r
- public void setUp() throws APIException, IOException{\r
-\r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testMain() throws Exception{\r
- \r
- String[] args = null;\r
- Properties props = new Properties();\r
- ClassLoader classLoader = getClass().getClassLoader();\r
- File file = new File(classLoader.getResource("FileServer.props").getFile());\r
-\r
-//PowerMockito.whenNew(Something.class).withArguments(argument).thenReturn(mockSomething);\r
- // env.setLog4JNames("log4j.properties","authz","fs","audit","init",null);\r
- // PowerMockito.whenNew(AuthzEnv.class).withArguments(props).thenReturn(authzEnvMock);\r
- // PowerMockito.doNothing().when(authzEnvMock.setLog4JNames(Matchers.anyString(), Matchers.anyString(), Matchers.anyString(), Matchers.anyString(), Matchers.anyString(), Matchers.anyString()));\r
- // PowerMockito.when(new AuthzEnv(props)).thenReturn(authzEnvMock);\r
- //PowerMockito.doNothing().when(authzEnv).setLog4JNames(Matchers.anyString(), Matchers.anyString(), Matchers.anyString(), Matchers.anyString(), Matchers.anyString(), Matchers.anyString());\r
- //PowerMockito.doNothing().when(authzEnvMock).setLog4JNames(" "," "," "," "," "," ");\r
-\r
- FileServer.main(args);\r
- //assertTrue(true);\r
- \r
- }\r
- \r
-}\r
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>com.att.authz</groupId>
- <artifactId>parent</artifactId>
- <version>1.0.1-SNAPSHOT</version>
- <relativePath>../pom.xml</relativePath>
- </parent>
-
- <artifactId>authz-gui</artifactId>
- <name>Authz GUI (Mobile First)</name>
- <description>GUI for Authz Management</description>
- <url>https://github.com/att/AAF</url>
-
- <developers>
- <developer>
- <name>Jonathan Gathman</name>
- <email></email>
- <organization>ATT</organization>
- <organizationUrl></organizationUrl>
- </developer>
- </developers>
-
-
- <properties>
- <maven.test.failure.ignore>true</maven.test.failure.ignore>
- <project.swmVersion>28</project.swmVersion>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <skipTests>false</skipTests>
- <project.interfaceVersion>1.0.0-SNAPSHOT</project.interfaceVersion>
- <project.innoVersion>1.0.0-SNAPSHOT</project.innoVersion>
- <project.cadiVersion>1.0.0-SNAPSHOT</project.cadiVersion>
- <project.dme2Version>3.1.200</project.dme2Version>
- <sonar.language>java</sonar.language>
- <sonar.skip>true</sonar.skip>
- <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>
- <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports</sonar.surefire.reportsPath>
- <sonar.jacoco.reportPath>${project.build.directory}/coverage-reports/jacoco.exec</sonar.jacoco.reportPath>
- <sonar.jacoco.itReportPath>${project.build.directory}/coverage-reports/jacoco-it.exec</sonar.jacoco.itReportPath>
- <sonar.jacoco.reportMissing.force.zero>true</sonar.jacoco.reportMissing.force.zero>
- <sonar.projectVersion>${project.version}</sonar.projectVersion>
- <nexusproxy>https://nexus.onap.org</nexusproxy>
- <snapshotNexusPath>/content/repositories/snapshots/</snapshotNexusPath>
- <releaseNexusPath>/content/repositories/releases/</releaseNexusPath>
- <stagingNexusPath>/content/repositories/staging/</stagingNexusPath>
- <sitePath>/content/sites/site/org/onap/aaf/authz/${project.artifactId}/${project.version}</sitePath>
- </properties>
-
-
- <dependencies>
- <dependency>
- <groupId>com.att.authz</groupId>
- <artifactId>authz-core</artifactId>
- <version>${project.version}</version>
- <exclusions>
- <exclusion>
- <groupId>javax.servlet</groupId>
- <artifactId>servlet-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>com.att.authz</groupId>
- <artifactId>authz-client</artifactId>
- <version>${project.version}</version>
- </dependency>
-
- <!-- <dependency>
- <groupId>com.att.authz</groupId>
- <artifactId>authz-att</artifactId>
- </dependency> -->
-
-
- <dependency>
- <groupId>com.att.authz</groupId>
- <artifactId>authz-cmd</artifactId>
- <version>${project.version}</version>
- <exclusions>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </exclusion>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>org.onap.aaf.cadi</groupId>
- <artifactId>cadi-aaf</artifactId>
- <version>${project.cadiVersion}</version>
- </dependency>
-
- <dependency>
- <groupId>org.onap.aaf.cadi</groupId>
- <artifactId>cadi-tguard</artifactId>
- <version>${project.cadiVersion}</version>
- </dependency>
-
- <dependency>
- <groupId>org.onap.aaf.cadi</groupId>
- <artifactId>cadi-client</artifactId>
- <version>${project.cadiVersion}</version>
- </dependency>
-
- <dependency>
- <groupId>gso</groupId>
- <artifactId>GLCookieDecryption</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.onap.aaf.inno</groupId>
- <artifactId>xgen</artifactId>
- <version>${project.innoVersion}</version>
- </dependency>
-
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <configuration>
- <includes>
- <include>**/*.class</include>
- </includes>
- </configuration>
- <version>2.3.1</version>
- </plugin>
-
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <executions>
- <execution>
- <id>swm</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- <configuration>
- <finalName>authz-gui-${project.version}.${project.swmVersion}</finalName>
-
- <descriptors>
- <descriptor>../authz-service/src/main/assemble/swm.xml</descriptor>
- </descriptors>
- <archive>
- </archive>
- </configuration>
- </execution>
- </executions>
- </plugin>
-
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-javadoc-plugin</artifactId>
- <version>2.10.4</version>
- <configuration>
- <failOnError>false</failOnError>
- </configuration>
- <executions>
- <execution>
- <id>attach-javadocs</id>
- <goals>
- <goal>jar</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
-
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-source-plugin</artifactId>
- <version>2.2.1</version>
- <executions>
- <execution>
- <id>attach-sources</id>
- <goals>
- <goal>jar-no-fork</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
-
-<plugin>
- <groupId>org.sonatype.plugins</groupId>
- <artifactId>nexus-staging-maven-plugin</artifactId>
- <version>1.6.7</version>
- <extensions>true</extensions>
- <configuration>
- <nexusUrl>${nexusproxy}</nexusUrl>
- <stagingProfileId>176c31dfe190a</stagingProfileId>
- <serverId>ecomp-staging</serverId>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.jacoco</groupId>
- <artifactId>jacoco-maven-plugin</artifactId>
- <version>0.7.7.201606060606</version>
- <configuration>
- <dumpOnExit>true</dumpOnExit>
- <includes>
- <include>org.onap.aaf.*</include>
- </includes>
- </configuration>
- <executions>
- <execution>
- <id>pre-unit-test</id>
- <goals>
- <goal>prepare-agent</goal>
- </goals>
- <configuration>
- <destFile>${project.build.directory}/coverage-reports/jacoco.exec</destFile>
- <!-- <append>true</append> -->
- </configuration>
- </execution>
- <execution>
- <id>pre-integration-test</id>
- <phase>pre-integration-test</phase>
- <goals>
- <goal>prepare-agent</goal>
- </goals>
- <configuration>
- <destFile>${project.build.directory}/coverage-reports/jacoco-it.exec</destFile>
- <!-- <append>true</append> -->
- </configuration>
- </execution>
- <execution>
- <goals>
- <goal>merge</goal>
- </goals>
- <phase>post-integration-test</phase>
- <configuration>
- <fileSets>
- <fileSet implementation="org.apache.maven.shared.model.fileset.FileSet">
- <directory>${project.build.directory}/coverage-reports</directory>
- <includes>
- <include>*.exec</include>
- </includes>
- </fileSet>
- </fileSets>
- <destFile>${project.build.directory}/jacoco-dev.exec</destFile>
- </configuration>
- </execution>
- </executions>
- </plugin>
-
- </plugins>
- </build>
-<distributionManagement>
- <repository>
- <id>ecomp-releases</id>
- <name>AAF Release Repository</name>
- <url>${nexusproxy}${releaseNexusPath}</url>
- </repository>
- <snapshotRepository>
- <id>ecomp-snapshots</id>
- <name>AAF Snapshot Repository</name>
- <url>${nexusproxy}${snapshotNexusPath}</url>
- </snapshotRepository>
- <site>
- <id>ecomp-site</id>
- <url>dav:${nexusproxy}${sitePath}</url>
- </site>
- </distributionManagement>
-</project>
+++ /dev/null
-##
-## AUTHZ GUI (authz-gui) Properties
-##
-
-hostname=_HOSTNAME_
-
-## DISCOVERY (DME2) Parameters on the Command Line
-AFT_LATITUDE=_AFT_LATITUDE_
-AFT_LONGITUDE=_AFT_LONGITUDE_
-AFT_ENVIRONMENT=_AFT_ENVIRONMENT_
-DEPLOYED_VERSION=_ARTIFACT_VERSION_
-
-## Pull in common/security properties
-
-cadi_prop_files=_COMMON_DIR_/com.att.aaf.common.props;_COMMON_DIR_/com.att.aaf.props
-
-##DME2 related parameters
-DMEServiceName=service=com.att.authz.authz-gui/version=_MAJOR_VER_._MINOR_VER_._PATCH_VER_/envContext=_ENV_CONTEXT_/routeOffer=_ROUTE_OFFER_
-AFT_DME2_PORT_RANGE=_AUTHZ_GUI_PORT_RANGE_
-
-# Turn on both AAF TAF & LUR 2.0
-aaf_url=https://DME2RESOLVE/service=com.att.authz.AuthorizationService/version=_MAJOR_VER_._MINOR_VER_/envContext=_ENV_CONTEXT_/routeOffer=_ROUTE_OFFER_
-
-## URLs
-aaf_url.gui_onboard=https://wiki.web.att.com/display/aaf/OnBoarding
-aaf_url.aaf_help=http://wiki.web.att.com/display/aaf
-aaf_url.cadi_help=http://wiki.web.att.com/display/cadi
-aaf_tools=swm,scamper,dme2,soacloud
-aaf_url.tool.swm=http://wiki.web.att.com/display/swm
-aaf_url.tool.scamper=https://wiki.web.att.com/display/scamper/Home
-aaf_url.tool.soacloud=https://wiki.web.att.com/display/soacloud/SOA+Cloud+Management+Platform
-aaf_url.tool.dme2=https://wiki.web.att.com/display/soacloud/User+Guide+-+DME2
-
-
+++ /dev/null
-###############################################################################
-# Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
-###############################################################################
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-log4j.appender.INIT=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.INIT.File=_LOG_DIR_/${LOG4J_FILENAME_init}
-log4j.appender.INIT.DatePattern='.'yyyy-MM-dd
-#log4j.appender.INIT.MaxFileSize=_MAX_LOG_FILE_SIZE_
-#log4j.appender.INIT.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_
-log4j.appender.INIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.INIT.layout.ConversionPattern=%d %p [%c] %m %n
-
-log4j.appender.GUI=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.GUI.File=_LOG_DIR_/${LOG4J_FILENAME_gui}
-log4j.appender.GUI.DatePattern='.'yyyy-MM-dd
-#log4j.appender.GUI.MaxFileSize=_MAX_LOG_FILE_SIZE_
-#log4j.appender.GUI.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_
-log4j.appender.GUI.layout=org.apache.log4j.PatternLayout
-log4j.appender.GUI.layout.ConversionPattern=%d %p [%c] %m %n
-
-log4j.appender.AUDIT=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.AUDIT.File=_LOG_DIR_/${LOG4J_FILENAME_audit}
-log4j.appender.AUDIT.DatePattern='.'yyyy-MM-dd
-#log4j.appender.GUI.MaxFileSize=_MAX_LOG_FILE_SIZE_
-#log4j.appender.GUI.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_
-log4j.appender.AUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.AUDIT.layout.ConversionPattern=%d %p [%c] %m %n
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] %m %n
-
-# General Apache libraries
-log4j.rootLogger=WARN
-log4j.logger.org.apache=WARN,INIT
-log4j.logger.dme2=WARN,INIT
-log4j.logger.init=INFO,INIT
-log4j.logger.gui=_LOG4J_LEVEL_,GUI
-log4j.logger.audit=INFO,AUDIT
-
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<!--
- Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- -->
-
-<ns2:ManagedResourceList xmlns:ns2="http://scld.att.com/lrm/util" xmlns="http://scld.att.com/lrm/commontypes" xmlns:ns3="http://scld.att.com/lrm/types">
- <ns2:ManagedResource>
- <ResourceDescriptor>
- <ResourceName>com.att.authz._ARTIFACT_ID_</ResourceName>
- <ResourceVersion>
- <Major>_MAJOR_VER_</Major>
- <Minor>_MINOR_VER_</Minor>
- <Patch>_PATCH_VER_</Patch>
- </ResourceVersion>
- <RouteOffer>_ROUTE_OFFER_</RouteOffer>
- </ResourceDescriptor>
- <ResourceType>Java</ResourceType>
- <ResourcePath>com.att.authz.gui.AuthGUI</ResourcePath>
- <ResourceProps>
- <Tag>process.workdir</Tag>
- <Value>_ROOT_DIR_</Value>
- </ResourceProps>
- <ResourceProps>
- <Tag>jvm.version</Tag>
- <Value>1.8</Value>
- </ResourceProps>
- <ResourceProps>
- <Tag>jvm.args</Tag>
- <Value>-DAFT_LATITUDE=_AFT_LATITUDE_ -DAFT_LONGITUDE=_AFT_LONGITUDE_ -DAFT_ENVIRONMENT=_AFT_ENVIRONMENT_ -Dplatform=_SCLD_PLATFORM_ -Dcom.sun.jndi.ldap.connect.pool.maxsize=20 -Dcom.sun.jndi.ldap.connect.pool.prefsize=10 -Dcom.sun.jndi.ldap.connect.pool.timeout=3000 </Value>
- </ResourceProps>
- <ResourceProps>
- <Tag>jvm.classpath</Tag>
- <Value>_ROOT_DIR_/etc:_ROOT_DIR_/lib/*:</Value>
- </ResourceProps>
- <ResourceProps>
- <Tag>jvm.heap.min</Tag>
- <Value>512m</Value>
- </ResourceProps>
- <ResourceProps>
- <Tag>jvm.heap.max</Tag>
- <Value>2048m</Value>
- </ResourceProps>
- <ResourceProps>
- <Tag>start.class</Tag>
- <Value>com.att.authz.gui.AuthGUI</Value>
- </ResourceProps>
- <ResourceProps>
- <Tag>stdout.redirect</Tag>
- <Value>_ROOT_DIR_/logs/SystemOut.log</Value>
- </ResourceProps>
- <ResourceProps>
- <Tag>stderr.redirect</Tag>
- <Value>_ROOT_DIR_/logs/SystemErr.log</Value>
- </ResourceProps>
- <ResourceOSID>aft</ResourceOSID>
- <ResourceStartType>AUTO</ResourceStartType>
- <ResourceStartPriority>3</ResourceStartPriority>
- <ResourceMinCount>_RESOURCE_MIN_COUNT_</ResourceMinCount>
- <ResourceMaxCount>_RESOURCE_MAX_COUNT_</ResourceMaxCount>
- <ResourceRegistration>_RESOURCE_REGISTRATION_</ResourceRegistration>
- <ResourceSWMComponent>com.att.authz:_ARTIFACT_ID_</ResourceSWMComponent>
- <ResourceSWMComponentVersion>_ARTIFACT_VERSION_</ResourceSWMComponentVersion>
- </ns2:ManagedResource>
-</ns2:ManagedResourceList>
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.cui;
-
-import java.io.PrintWriter;
-import java.security.Principal;
-
-import javax.servlet.ServletInputStream;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import org.onap.aaf.cadi.config.Config;
-import org.onap.aaf.cadi.http.HTransferSS;
-import com.att.cmd.AAFcli;
-import com.att.cssa.rserv.HttpCode;
-
-public class CUI extends HttpCode<AuthzTrans, Void> {
- private final AuthGUI gui;
- public CUI(AuthGUI gui) {
- super(null,"Command Line");
- this.gui = gui;
- }
-
- @Override
- public void handle(AuthzTrans trans, HttpServletRequest req,HttpServletResponse resp) throws Exception {
- ServletInputStream isr = req.getInputStream();
- PrintWriter pw = resp.getWriter();
- int c;
- StringBuilder cmd = new StringBuilder();
-
- while((c=isr.read())>=0) {
- cmd.append((char)c);
- }
-
- Principal p = trans.getUserPrincipal();
- trans.env().setProperty(Config.AAF_DEFAULT_REALM, trans.env().getProperty(Config.AAF_DEFAULT_REALM,Config.getDefaultRealm()));
- AAFcli aafcli = new AAFcli(trans.env(), pw,
- gui.aafCon.hman(),
- gui.aafCon.securityInfo(), new HTransferSS(p,AuthGUI.app,
- gui.aafCon.securityInfo()));
-
- aafcli.verbose(false);
- aafcli.gui(true);
- String cmdStr = cmd.toString();
- if (!cmdStr.contains("--help")) {
- cmdStr = cmdStr.replaceAll("help", "--help");
- }
- if (!cmdStr.contains("--version")) {
- cmdStr = cmdStr.replaceAll("version", "--version");
- }
- try {
- aafcli.eval(cmdStr);
- pw.flush();
- } catch (Exception e) {
- pw.flush();
- pw.println(e.getMessage());
- } finally {
- aafcli.close();
- }
-
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui;
-
-import static com.att.cssa.rserv.HttpMethods.GET;
-import static com.att.cssa.rserv.HttpMethods.POST;
-import static com.att.cssa.rserv.HttpMethods.PUT;
-
-import java.io.IOException;
-import java.security.GeneralSecurityException;
-import java.util.ArrayList;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Properties;
-
-import com.att.aft.dme2.api.DME2Exception;
-import com.att.aft.dme2.api.DME2Manager;
-import com.att.aft.dme2.api.DME2Server;
-import com.att.aft.dme2.api.DME2ServerProperties;
-import com.att.aft.dme2.api.DME2ServiceHolder;
-import com.att.aft.dme2.api.util.DME2FilterHolder;
-import com.att.aft.dme2.api.util.DME2FilterHolder.RequestDispatcherType;
-import com.att.aft.dme2.api.util.DME2ServletHolder;
-import com.att.authz.common.Define;
-import com.att.authz.cui.CUI;
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.env.AuthzTransFilter;
-import com.att.authz.env.AuthzTransOnlyFilter;
-import com.att.authz.gui.pages.ApiDocs;
-import com.att.authz.gui.pages.ApiExample;
-import com.att.authz.gui.pages.ApprovalAction;
-import com.att.authz.gui.pages.ApprovalForm;
-import com.att.authz.gui.pages.Home;
-import com.att.authz.gui.pages.LoginLanding;
-import com.att.authz.gui.pages.LoginLandingAction;
-import com.att.authz.gui.pages.NsDetail;
-import com.att.authz.gui.pages.NsHistory;
-import com.att.authz.gui.pages.NsInfoAction;
-import com.att.authz.gui.pages.NsInfoForm;
-import com.att.authz.gui.pages.NssShow;
-import com.att.authz.gui.pages.PassChangeAction;
-import com.att.authz.gui.pages.PassChangeForm;
-import com.att.authz.gui.pages.PendingRequestsShow;
-import com.att.authz.gui.pages.PermDetail;
-import com.att.authz.gui.pages.PermGrantAction;
-import com.att.authz.gui.pages.PermGrantForm;
-import com.att.authz.gui.pages.PermHistory;
-import com.att.authz.gui.pages.PermsShow;
-import com.att.authz.gui.pages.RequestDetail;
-import com.att.authz.gui.pages.RoleDetail;
-import com.att.authz.gui.pages.RoleHistory;
-import com.att.authz.gui.pages.RolesShow;
-import com.att.authz.gui.pages.UserRoleExtend;
-import com.att.authz.gui.pages.UserRoleRemove;
-import com.att.authz.gui.pages.WebCommand;
-import com.att.authz.org.OrganizationFactory;
-import com.att.authz.server.AbsServer;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.aaf.v2_0.AAFTrustChecker;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.config.Config;
-import com.att.cssa.rserv.CachingFileAccess;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.rosetta.env.RosettaDF;
-import com.att.xgen.html.HTMLGen;
-import com.att.xgen.html.State;
-
-import aaf.v2_0.Api;
-import aaf.v2_0.Approvals;
-import aaf.v2_0.CredRequest;
-import aaf.v2_0.Error;
-import aaf.v2_0.History;
-import aaf.v2_0.Nss;
-import aaf.v2_0.Perms;
-import aaf.v2_0.RolePermRequest;
-import aaf.v2_0.Roles;
-import aaf.v2_0.UserRoles;
-import aaf.v2_0.Users;
-
-public class AuthGUI extends AbsServer implements State<Env>{
- public static final int TIMEOUT = 60000;
- public static final String app = "AAF GUI";
-
- public RosettaDF<Perms> permsDF;
- public RosettaDF<Roles> rolesDF;
- public RosettaDF<Users> usersDF;
- public RosettaDF<UserRoles> userrolesDF;
- public RosettaDF<CredRequest> credReqDF;
- public RosettaDF<RolePermRequest> rolePermReqDF;
- public RosettaDF<Approvals> approvalsDF;
- public RosettaDF<Nss> nssDF;
- public RosettaDF<Api> apiDF;
- public RosettaDF<Error> errDF;
- public RosettaDF<History> historyDF;
-
- public final AuthzEnv env;
- public final Slot slot_httpServletRequest;
-
- public AuthGUI(final AuthzEnv env) throws CadiException, GeneralSecurityException, IOException, APIException {
- super(env,app);
- this.env = env;
-
- env.setLog4JNames("log4j.properties","authz","gui","audit","init","trace ");
- OrganizationFactory.setDefaultOrg(env, "com.att.authz.org.att.ATT");
-
-
- slot_httpServletRequest = env.slot("HTTP_SERVLET_REQUEST");
-
- permsDF = env.newDataFactory(Perms.class);
- rolesDF = env.newDataFactory(Roles.class);
-// credsDF = env.newDataFactory(Cred.class);
- usersDF = env.newDataFactory(Users.class);
- userrolesDF = env.newDataFactory(UserRoles.class);
- credReqDF = env.newDataFactory(CredRequest.class);
- rolePermReqDF = env.newDataFactory(RolePermRequest.class);
- approvalsDF = env.newDataFactory(Approvals.class);
- nssDF = env.newDataFactory(Nss.class);
- apiDF = env.newDataFactory(Api.class);
- errDF = env.newDataFactory(Error.class);
- historyDF = env.newDataFactory(History.class);
-
- /////////////////////////
- // Screens
- /////////////////////////
- // Start Screen
- final Page start = new Display(this, GET, new Home(this)).page();
-
- // MyPerms Screens
- final Page myPerms = new Display(this, GET, new PermsShow(this, start)).page();
- Page permDetail = new Display(this, GET, new PermDetail(this, start, myPerms)).page();
- new Display(this, GET, new PermHistory(this,start,myPerms,permDetail));
-
- // MyRoles Screens
- final Page myRoles = new Display(this, GET, new RolesShow(this, start)).page();
- Page roleDetail = new Display(this, GET, new RoleDetail(this, start, myRoles)).page();
- new Display(this, GET, new RoleHistory(this,start,myRoles,roleDetail));
-
- // MyNameSpace
- final Page myNamespaces = new Display(this, GET, new NssShow(this, start)).page();
- Page nsDetail = new Display(this, GET, new NsDetail(this, start, myNamespaces)).page();
- new Display(this, GET, new NsHistory(this, start,myNamespaces,nsDetail));
-
- // Password Change Screens
- final Page pwc = new Display(this, GET, new PassChangeForm(this, start)).page();
- new Display(this, POST, new PassChangeAction(this, start, pwc));
-
- // Validation Change Screens
- final Page validate = new Display(this, GET, new ApprovalForm(this, start)).page();
- new Display(this, POST, new ApprovalAction(this, start, validate));
-
- // Onboard, Detailed Edit Screens
- final Page onb = new Display(this, GET, new NsInfoForm(this, start)).page();
- new Display(this, POST, new NsInfoAction(this, start, onb));
-
- // Web Command Screens
- /* final Page webCommand =*/ new Display(this, GET, new WebCommand(this, start)).page();
-
- // API Docs
- final Page apidocs = new Display(this, GET, new ApiDocs(this, start)).page();
- new Display(this, GET, new ApiExample(this,start, apidocs)).page();
-
- // Permission Grant Page
- final Page permGrant = new Display(this, GET, new PermGrantForm(this, start)).page();
- new Display(this, POST, new PermGrantAction(this, start, permGrant)).page();
-
- // Login Landing if no credentials detected
- final Page loginLanding = new Display(this, GET, new LoginLanding(this, start)).page();
- new Display(this, POST, new LoginLandingAction(this, start, loginLanding));
-
- // User Role Request Extend and Remove
- new Display(this, GET, new UserRoleExtend(this, start,myRoles)).page();
- new Display(this, GET, new UserRoleRemove(this, start,myRoles)).page();
-
- // See my Pending Requests
- final Page requestsShow = new Display(this, GET, new PendingRequestsShow(this, start)).page();
- new Display(this, GET, new RequestDetail(this, start, requestsShow));
-
- // Command line Mechanism
- route(env, PUT, "/gui/cui", new CUI(this),"text/plain;charset=utf-8","*/*");
-
- ///////////////////////
- // WebContent Handler
- ///////////////////////
- route(env,GET,"/theme/:key", new CachingFileAccess<AuthzTrans>(env,
- CachingFileAccess.CFA_WEB_DIR,"theme"));
- ///////////////////////
- }
-
- public static void main(String[] args) {
- setup(AuthGUI.class, "authGUI.props");
- }
-
- /**
- * Start up AuthzAPI as DME2 Service
- * @param env
- * @param props
- * @throws DME2Exception
- * @throws CadiException
- */
- public void startDME2(Properties props) throws DME2Exception, CadiException {
-
- DME2Manager dme2 = new DME2Manager("AAF GUI DME2Manager", props);
- DME2ServiceHolder svcHolder;
- List<DME2ServletHolder> slist = new ArrayList<DME2ServletHolder>();
- svcHolder = new DME2ServiceHolder();
- String serviceName = env.getProperty("DMEServiceName",null);
- if(serviceName!=null) {
- svcHolder.setServiceURI(serviceName);
- svcHolder.setManager(dme2);
- svcHolder.setContext("/");
-
-
- DME2ServletHolder srvHolder = new DME2ServletHolder(this, new String[]{"/gui"});
- srvHolder.setContextPath("/*");
- slist.add(srvHolder);
-
- EnumSet<RequestDispatcherType> edlist = EnumSet.of(
- RequestDispatcherType.REQUEST,
- RequestDispatcherType.FORWARD,
- RequestDispatcherType.ASYNC
- );
-
- ///////////////////////
- // Apply Filters
- ///////////////////////
- List<DME2FilterHolder> flist = new ArrayList<DME2FilterHolder>();
-
- // Secure all GUI interactions with AuthzTransFilter
- flist.add(new DME2FilterHolder(new AuthzTransFilter(env, aafCon, new AAFTrustChecker(
- env.getProperty(Config.CADI_TRUST_PROP, Config.CADI_USER_CHAIN),
- Define.ROOT_NS + ".mechid|"+Define.ROOT_COMPANY+"|trust"
- )),"/gui/*", edlist));
-
- // Don't need security for display Artifacts or login page
- AuthzTransOnlyFilter atof;
- flist.add(new DME2FilterHolder(atof =new AuthzTransOnlyFilter(env),"/theme/*", edlist));
- flist.add(new DME2FilterHolder(atof,"/js/*", edlist));
- flist.add(new DME2FilterHolder(atof,"/login/*", edlist));
-
- svcHolder.setFilters(flist);
- svcHolder.setServletHolders(slist);
-
- DME2Server dme2svr = dme2.getServer();
-// dme2svr.setGracefulShutdownTimeMs(1000);
-
- env.init().log("Starting AAF GUI with Jetty/DME2 server...");
- dme2svr.start();
- DME2ServerProperties dsprops = dme2svr.getServerProperties();
- try {
-// if(env.getProperty("NO_REGISTER",null)!=null)
- dme2.bindService(svcHolder);
- env.init().log("DME2 is available as HTTP"+(dsprops.isSslEnable()?"/S":""),"on port:",dsprops.getPort());
-
- while(true) { // Per DME2 Examples...
- Thread.sleep(5000);
- }
- } catch(InterruptedException e) {
- env.init().log("AAF Jetty Server interrupted!");
- } catch(Exception e) { // Error binding service doesn't seem to stop DME2 or Process
- env.init().log(e,"DME2 Initialization Error");
- dme2svr.stop();
- System.exit(1);
- }
- } else {
- env.init().log("Properties must contain DMEServiceName");
- }
- }
-
-
- public AuthzEnv env() {
- return env;
- }
-
- /**
- * Derive API Error Class from AAF Response (future)
- */
- public Error getError(AuthzTrans trans, Future<?> fp) {
-// try {
- String text = fp.body();
- Error err = new Error();
- err.setMessageId(Integer.toString(fp.code()));
- if(text==null || text.length()==0) {
- err.setText("**No Message**");
- } else {
- err.setText(fp.body());
- }
- return err;
-// } catch (APIException e) {
-// Error err = new Error();
-// err.setMessageId(Integer.toString(fp.code()));
-// err.setText("Could not obtain response from AAF Message: " + e.getMessage());
-// return err;
-// }
- }
-
- public void writeError(AuthzTrans trans, Future<?> fp, HTMLGen hgen) {
- Error err = getError(trans,fp);
-
- String messageBody = err.getText();
- List<String> vars = err.getVariables();
- for (int varCounter=0;varCounter<vars.size();) {
- String var = vars.get(varCounter++);
- if (messageBody.indexOf("%" + varCounter) >= 0) {
- messageBody = messageBody.replace("%" + varCounter, var);
- }
- }
-
- String msg = "[" + err.getMessageId() + "] " + messageBody;
- if(hgen!=null) {
- hgen.text(msg);
- }
- trans.checkpoint("AAF Error: " + msg);
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui;
-
-import static com.att.xgen.html.HTMLGen.A;
-import static com.att.xgen.html.HTMLGen.LI;
-import static com.att.xgen.html.HTMLGen.UL;
-
-import java.io.IOException;
-
-import org.onap.aaf.inno.env.APIException;
-import com.att.xgen.Cache;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-
-public class BreadCrumbs extends NamedCode {
- private Page[] breadcrumbs;
-
- public BreadCrumbs(Page ... pages) {
- super(false,"breadcrumbs");
- breadcrumbs = pages;
- }
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- // BreadCrumbs
- Mark mark = new Mark();
- hgen.incr(mark, UL);
- for(Page p : breadcrumbs) {
- hgen.incr(LI,true)
- .leaf(A,"href="+p.url()).text(p.name())
- .end(2);
- }
- hgen.end(mark);
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui;
-
-import java.io.IOException;
-
-import org.onap.aaf.inno.env.APIException;
-import com.att.xgen.Cache;
-import com.att.xgen.html.HTMLGen;
-
-public class Controls extends NamedCode {
- public Controls() {
- super(false,"controls");
- }
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- hgen.incr("form","method=post")
- .incr("input", true, "type=checkbox", "name=vehicle", "value=Bike").text("I have a bike").end()
- .text("Password: ")
- .incr("input", true, "type=password", "id=password1").end()
- .tagOnly("input", "type=submit", "value=Submit")
- .end();
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui;
-
-import java.util.Enumeration;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.cssa.rserv.HttpCode;
-import com.att.cssa.rserv.HttpMethods;
-import org.onap.aaf.inno.env.Slot;
-
-public class Display {
- private final Page get;
- public Display(final AuthGUI gui, final HttpMethods meth, final Page page) {
- get = page;
- final String[] fields = page.fields();
- final Slot slots[] = new Slot[fields.length];
- String prefix = page.name() + '.';
- for(int i=0;i<slots.length;++i) {
- slots[i] = gui.env.slot(prefix + fields[i]);
- }
-
- /*
- * We handle all the "Form POST" calls here with a naming convention that allows us to create arrays from strings.
- *
- * On the HTTP side, elements concatenate their name with their Index number (if multiple). In this code,
- * we turn such names into arrays with same index number. Then, we place them in the Transaction "Properties" so that
- * it can be transferred to subclasses easily.
- */
- if(meth.equals(HttpMethods.POST)) {
- // Here, we'll expect FORM URL Encoded Data, which we need to get from the body
- gui.route(gui.env, meth, page.url(),
- new HttpCode<AuthzTrans,AuthGUI>(gui,page.name()) {
- @Override
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {
- trans.put(gui.slot_httpServletRequest, req);
- for(int i=0; i<fields.length;++i) {
- int idx = fields[i].indexOf("[]");
- if(idx<0) { // single value
- trans.put(slots[i], req.getParameter(fields[i])); // assume first value
- } else { // multi value
- String field=fields[i].substring(0, idx);
- String[] array = new String[30];
- for(Enumeration<String> names = req.getParameterNames(); names.hasMoreElements();) {
- String key = names.nextElement();
- if(key.subSequence(0, idx).equals(field)) {
- try {
- int x = Integer.parseInt(key.substring(field.length()));
- if(x>=array.length) {
- String[] temp = new String[x+10];
- System.arraycopy(temp, 0, temp, 0, array.length);
- array = temp;
- }
- array[x]=req.getParameter(key);
- } catch (NumberFormatException e) {
- trans.debug().log(e);
- }
- }
- }
- trans.put(slots[i], array);
- }
- }
- page.replay(context,trans,resp.getOutputStream(),"general");
- }
- }, "application/x-www-form-urlencoded","*/*");
-
- } else {
- // Transfer whether Page shouldn't be cached to local Final var.
- final boolean no_cache = page.no_cache;
-
- gui.route(gui.env, meth, page.url(),
- new HttpCode<AuthzTrans,AuthGUI>(gui,page.name()) {
- @Override
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {
- trans.put(gui.slot_httpServletRequest, req);
- for(int i=0; i<slots.length;++i) {
- int idx = fields[i].indexOf("[]");
- if(idx<0) { // single value
- trans.put(slots[i], req.getParameter(fields[i]));
- } else { // multi value
- String[] array = new String[30];
- String field=fields[i].substring(0, idx);
-
- for(Enumeration<String> mm = req.getParameterNames();mm.hasMoreElements();) {
- String key = mm.nextElement();
- if(key.startsWith(field)) {
- try {
- int x = Integer.parseInt(key.substring(field.length()));
- if(x>=array.length) {
- String[] temp = new String[x+10];
- System.arraycopy(temp, 0, temp, 0, array.length);
- array = temp;
- }
- array[x]=req.getParameter(key);
- } catch (NumberFormatException e) {
- trans.debug().log(e);
- }
- }
- }
- trans.put(slots[i], array);
- }
- }
- page.replay(context,trans,resp.getOutputStream(),"general");
- }
-
- @Override
- public boolean no_cache() {
- return no_cache;
- }
- }, "text/html","*/*");
- }
-
- }
-
- public Page page() {
- return get;
- }
-}
\ No newline at end of file
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui;
-
-import java.io.IOException;
-
-import org.onap.aaf.inno.env.APIException;
-import com.att.xgen.Cache;
-import com.att.xgen.html.HTMLGen;
-
-public class Form extends NamedCode {
- private String preamble;
- private NamedCode content;
-
- public Form(boolean no_cache, NamedCode content) {
- super(no_cache,content.idattrs());
- this.content = content;
- preamble=null;
- idattrs = content.idattrs();
- }
-
- public Form preamble(String preamble) {
- this.preamble = preamble;
- return this;
- }
-
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- if(preamble!=null) {
- hgen.incr("p","class=preamble").text(preamble).end();
- }
- hgen.incr("form","method=post");
-
- content.code(cache, hgen);
-
- hgen.tagOnly("input", "type=submit", "value=Submit")
- .tagOnly("input", "type=reset", "value=Reset")
- .end();
- }
-
- /* (non-Javadoc)
- * @see com.att.authz.gui.NamedCode#idattrs()
- */
- @Override
- public String[] idattrs() {
- return content.idattrs();
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui;
-
-import com.att.xgen.Code;
-import com.att.xgen.html.HTMLGen;
-
-
-
-public abstract class NamedCode implements Code<HTMLGen> {
- public final boolean no_cache;
- protected String[] idattrs;
-
- /*
- * Mark whether this code should not be cached, and any attributes
- */
- public NamedCode(final boolean no_cache, String ... idattrs) {
- this.idattrs = idattrs;
- this.no_cache = no_cache;
- }
-
- /**
- * Return ID and Any Attributes needed to create a "div" section of this code
- * @return
- */
- public String[] idattrs() {
- return idattrs;
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui;
-
-import static com.att.xgen.html.HTMLGen.A;
-import static com.att.xgen.html.HTMLGen.H1;
-import static com.att.xgen.html.HTMLGen.LI;
-import static com.att.xgen.html.HTMLGen.TITLE;
-import static com.att.xgen.html.HTMLGen.UL;
-
-import java.io.IOException;
-import java.security.Principal;
-
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import org.onap.aaf.cadi.config.Config;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.util.Split;
-import com.att.xgen.Cache;
-import com.att.xgen.CacheGen;
-import com.att.xgen.Code;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLCacheGen;
-import com.att.xgen.html.HTMLGen;
-import com.att.xgen.html.Imports;
-
-/**
- * A Base "Mobile First" Page
- *
- *
- */
-public class Page extends HTMLCacheGen {
- public static enum BROWSER {iPhone,html5,ie,ieOld};
-
- public static final int MAX_LINE=20;
-
- protected static final String[] NO_FIELDS = new String[0];
-
- private static final String ENV_CONTEXT = "envContext";
- private static final String DME_SERVICE_NAME = "DMEServiceName";
- private static final String ROUTE_OFFER = "routeOffer";
- private static final String BROWSER_TYPE = "BROWSER_TYPE";
-
- private final String bcName, bcUrl;
- private final String[] fields;
-
- public final boolean no_cache;
-
- public String name() {
- return bcName;
- }
-
- public String url() {
- return bcUrl;
- }
-
- public String[] fields() {
- return fields;
- }
-
- public Page(AuthzEnv env, String name, String url, String [] fields, final NamedCode ... content) throws APIException,IOException {
- this(env,name,url,1,fields,content);
- }
-
- public Page(AuthzEnv env, String name, String url, int backdots, String [] fields, final NamedCode ... content) throws APIException,IOException {
- super(CacheGen.PRETTY, new PageCode(env, backdots, content));
- bcName = name;
- bcUrl = url;
- this.fields = fields;
- // Mark which fields must be "no_cache"
- boolean no_cacheTemp=false;
- for(NamedCode nc : content) {
- if(nc.no_cache) {
- no_cacheTemp=true;
- break;
- }
- }
- no_cache=no_cacheTemp;
- }
-
- private static class PageCode implements Code<HTMLGen> {
- private final NamedCode[] content;
- private final Slot browserSlot;
- private final int backdots;
- protected AuthzEnv env;
-
- public PageCode(AuthzEnv env, int backdots, final NamedCode[] content) {
- this.content = content;
- this.backdots = backdots;
- browserSlot = env.slot(BROWSER_TYPE);
- this.env = env;
- }
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- // Note: I found that App Storage saves everything about the page, or not. Thus, if you declare the page uncacheable, none of the
- // Artifacts, like JPGs are stored, which makes this feature useless for Server driven elements
- //hgen.html("manifest=../theme/aaf.appcache");
- cache.dynamic(hgen, new DynamicCode<HTMLGen,AuthGUI,AuthzTrans>() {
- @Override
- public void code(AuthGUI state, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- switch(browser(trans,browserSlot)) {
- case ieOld:
- case ie:
- hgen.directive("!DOCTYPE html");
- hgen.directive("meta", "http-equiv=X-UA-Compatible","content=IE=11");
- default:
- }
- }
- });
- hgen.html();
- Mark head = hgen.head();
- hgen.leaf(TITLE).text("AT&T Authentication/Authorization Tool").end();
- hgen.imports(new Imports(backdots).css("theme/aaf5.css")
- .js("theme/comm.js")
- .js("theme/console.js")
- .js("theme/common.js"));
- cache.dynamic(hgen, new DynamicCode<HTMLGen,AuthGUI,AuthzTrans>() {
- @Override
- public void code(AuthGUI state, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- switch(browser(trans,browserSlot)) {
- case iPhone:
- hgen.imports(new Imports(backdots).css("theme/aaf5iPhone.css"));
- break;
- case ie:
- case ieOld:
- hgen.js().text("document.createElement('header');")
- .text("document.createElement('nav');")
- .done();
- case html5:
- hgen.imports(new Imports(backdots).css("theme/aaf5Desktop.css"));
- break;
- }
- }
- });
- hgen.end(head);
-
- Mark body = hgen.body();
- Mark header = hgen.header();
- cache.dynamic(hgen, new DynamicCode<HTMLGen,AuthGUI,AuthzTrans>() {
- @Override
- public void code(AuthGUI state, AuthzTrans trans,Cache<HTMLGen> cache, HTMLGen xgen)
- throws APIException, IOException {
- // Obtain Server Info, and print
- String DMEServiceName = trans.getProperty(DME_SERVICE_NAME);
- String env = DMEServiceName.substring(
- DMEServiceName.indexOf(ENV_CONTEXT),
- DMEServiceName.indexOf(ROUTE_OFFER) -1).split("=")[1];
-
- xgen.leaf(H1).text("AT&T Auth Tool on " + env).end();
- xgen.leaf("p","id=version").text("AAF Version: " + trans.getProperty(Config.AAF_DEPLOYED_VERSION, "N/A")).end();
-
- // Obtain User Info, and print
- Principal p = trans.getUserPrincipal();
- String user;
- if(p==null) {
- user = "please choose a Login Authority";
- } else {
- user = p.getName();
- }
- xgen.leaf("p","id=welcome").text("Welcome, " + user).end();
-
- switch(browser(trans,browserSlot)) {
- case ieOld:
- case ie:
- xgen.incr("h5").text("This app is Mobile First HTML5. Internet Explorer "
- + " does not support all HTML5 standards. Old, non TSS-Standard versions may not function correctly.").br()
- .text(" For best results, use a highly compliant HTML5 browser like Firefox.")
- .end();
- break;
- default:
- }
- }
- });
-
- hgen.hr();
-
- int cIdx;
- NamedCode nc;
- // If BreadCrumbs, put here
- if(content.length>0 && content[0] instanceof BreadCrumbs) {
- nc = content[0];
- Mark ctnt = hgen.divID(nc.idattrs());
- nc.code(cache, hgen);
- hgen.end(ctnt);
- cIdx = 1;
- } else {
- cIdx = 0;
- }
-
- hgen.end(header);
-
- Mark inner = hgen.divID("inner");
- // Content
- for(int i=cIdx;i<content.length;++i) {
- nc = content[i];
- Mark ctnt = hgen.divID(nc.idattrs());
- nc.code(cache, hgen);
- hgen.end(ctnt);
- }
-
- hgen.end(inner);
-
- // Navigation - Using older Nav to work with decrepit IE versions
-
- Mark nav = hgen.divID("nav");
- hgen.incr("h2").text("Related Links").end();
- hgen.incr(UL)
- .leaf(LI).leaf(A,"href="+env.getProperty("aaf_url.aaf_help")).text("AAF WIKI").end(2)
- .leaf(LI).leaf(A,"href="+env.getProperty("aaf_url.cadi_help")).text("CADI WIKI").end(2);
- String tools = env.getProperty("aaf_tools");
- if(tools!=null) {
- hgen.hr()
- .incr(HTMLGen.UL,"style=margin-left:5%")
- .leaf(HTMLGen.H3).text("Related Tools").end();
-
- for(String tool : Split.splitTrim(',',tools)) {
- hgen.leaf(LI).leaf(A,"href="+env.getProperty("aaf_url.tool."+tool)).text(tool.toUpperCase() + " Help").end(2);
- }
- hgen.end();
- }
- hgen.end();
-
- hgen.hr();
-
- hgen.end(nav);
- // Footer - Using older Footer to work with decrepit IE versions
- Mark footer = hgen.divID("footer");
- hgen.textCR(1, "(c) 2014-6 AT&T Inc. All Rights Reserved")
- .end(footer);
-
- hgen.end(body);
- hgen.endAll();
- }
- }
-
- public static String getBrowserType() {
- return BROWSER_TYPE;
- }
-
- /**
- * It's IE if int >=0
- *
- * Use int found in "ieVersion"
- *
- * Official IE 7
- * Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; .NET CLR 1.1.4322;
- * .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)
- * Official IE 8
- * Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2;
- * .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; ATT)
- *
- * IE 11 Compatibility
- * Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; SLCC2; .NET CLR 2.0.50727;
- * .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET CLR 1.1.4322; .NET4.0C; .NET4.0E; InfoPath.3; HVD; ATT)
- *
- * IE 11 (not Compatiblity)
- * Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727;
- * .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET CLR 1.1.4322; .NET4.0C; .NET4.0E; InfoPath.3; HVD; ATT)
- *
- * @param trans
- * @return
- */
- public static BROWSER browser(AuthzTrans trans, Slot slot) {
- BROWSER br = trans.get(slot, null);
- if(br==null) {
- String agent = trans.agent();
- int msie;
- if(agent.contains("iPhone") /* other phones? */) {
- br=BROWSER.iPhone;
- } else if ((msie = agent.indexOf("MSIE"))>=0) {
- msie+=5;
- int end = agent.indexOf(";",msie);
- float ver;
- try {
- ver = Float.valueOf(agent.substring(msie,end));
- br = ver<8f?BROWSER.ieOld:BROWSER.ie;
- } catch (Exception e) {
- br = BROWSER.ie;
- }
- } else {
- br = BROWSER.html5;
- }
- trans.put(slot,br);
- }
- return br;
- }
-}
-
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui;
-
-import static com.att.xgen.html.HTMLGen.TABLE;
-import static com.att.xgen.html.HTMLGen.TD;
-import static com.att.xgen.html.HTMLGen.TR;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import com.att.authz.gui.table.AbsCell;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.Trans;
-import org.onap.aaf.inno.env.TransStore;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-import com.att.xgen.html.State;
-
-public class Table<S extends State<Env>, TRANS extends TransStore> extends NamedCode {
- private final Slot ROW_MSG_SLOT, EMPTY_TABLE_SLOT;
- private final String title;
- private final String[] columns;
- private final Rows rows;
-
- public Table(String title, TRANS trans, Data<S,TRANS> data, String ... attrs) {
- super(true,attrs);
- ROW_MSG_SLOT=trans.slot("TABLE_ROW_MSG");
- EMPTY_TABLE_SLOT=trans.slot("TABLE_EMPTY");
- this.columns = data.headers();
- boolean alt = false;
- for(String s : attrs) {
- if("class=std".equals(s) || "class=stdform".equals(s)) {
- alt=true;
- }
- }
- rows = new Rows(data,alt?1:0);
- this.title = title;
-
- // Derive an ID from title (from no spaces, etc), and prepend to IDAttributes (Protected from NamedCode)
- idattrs = new String[attrs.length+1];
- idattrs[0] = title.replaceAll("\\s","");
- System.arraycopy(attrs, 0, idattrs, 1, attrs.length);
- }
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- Mark table = new Mark();
- Mark tr = new Mark();
- hgen.incr(table,TABLE)
- .leaf("caption", "class=title").text(title).end()
- .incr(tr,TR);
- for(String column : columns) {
- hgen.leaf("th").text(column).end();
- }
- hgen.end(tr);
-
- // Load Rows Dynamically
- cache.dynamic(hgen, rows);
- // End Table
- hgen.end(table);
-
- // Print Message from Row Gathering, if available
- cache.dynamic(hgen, new DynamicCode<HTMLGen,S,TRANS>() {
- @Override
- public void code(S state, TRANS trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- String msg;
- if((msg = trans.get(EMPTY_TABLE_SLOT, null))!=null) {
- hgen.incr("style").text("#inner tr,caption,input,p.preamble {display: none;}#inner p.notfound {margin: 0px 0px 0px 20px}").end();
- hgen.incr(HTMLGen.P,"class=notfound").text(msg).end().br();
- } else if((msg=trans.get(ROW_MSG_SLOT,null))!=null) {
- hgen.p(msg).br();
- }
- }
- });
- }
-
- public static class Cells {
- public static final Cells EMPTY = new Cells();
- private Cells() {
- cells = new AbsCell[0][0];
- msg = "No Data Found";
- }
-
- public Cells(ArrayList<AbsCell[]> arrayCells, String msg) {
- cells = new AbsCell[arrayCells.size()][];
- arrayCells.toArray(cells);
- this.msg = msg;
- }
- public AbsCell[][] cells;
- public String msg;
- }
-
- public interface Data<S extends State<Env>, TRANS extends Trans> {
- public Cells get(S state,TRANS trans);
- public String[] headers();
- }
-
- private class Rows extends DynamicCode<HTMLGen,S,TRANS> {
- private Data<S,TRANS> data;
- private int alt;
-
- public Rows(Data<S,TRANS> data, int alt) {
- this.data = data;
- this.alt = alt;
- }
-
- @Override
- public void code(S state, TRANS trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- Mark tr = new Mark();
- Mark td = new Mark();
-
- int alt = this.alt;
- Cells cells = data.get(state, trans);
- if(cells.cells.length>0) {
- for(AbsCell[] row : cells.cells) {
- switch(alt) {
- case 1:
- alt=2;
- case 0:
- hgen.incr(tr,TR);
- break;
- default:
- alt=1;
- hgen.incr(tr,TR,"class=alt");
- }
- for(AbsCell cell :row) {
- hgen.leaf(td, TD,cell.attrs());
- cell.write(hgen);
- hgen.end(td);
- }
- hgen.end(tr);
- }
- // Pass Msg back to Table code, in order to place after Table Complete
- if(cells.msg!=null) {
- trans.put(ROW_MSG_SLOT,cells.msg);
- }
-
- } else {
- trans.put(EMPTY_TABLE_SLOT,cells.msg);
- }
- }
- }
-}
+++ /dev/null
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.Symm;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import com.att.cssa.rserv.HttpMethods;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.Api;
-import aaf.v2_0.Api.Route;
-
-public class ApiDocs extends Page {
- // Package on purpose
- private static final String HREF = "/gui/api";
- private static final String NAME = "AAF RESTful API";
- private static final String fields[] = {};
- private static final String ERROR_LINK = "<a href=\"./example/"
- + "YXBwbGljYXRpb24vRXJyb3IranNvbg=="
-// + Symm.base64noSplit().encode("application/Error+json")
- + "\">JSON</a> "
- + "<a href=\"./example/"
- + "YXBwbGljYXRpb24vRXJyb3IreG1s"
-// + Symm.base64noSplit().encode("application/Error+xml")
- + "\">XML</a> ";
-
-
- public ApiDocs(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME,HREF, fields,
- new BreadCrumbs(breadcrumbs),
- new Preamble(),
- new Table<AuthGUI,AuthzTrans>("AAF API Reference",gui.env.newTransNoAvg(),new Model(), "class=std")
- );
- }
-
- private static class Preamble extends NamedCode {
-
- private static final String I = "i";
-
- public Preamble() {
- super(false, "preamble");
- }
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen xgen) throws APIException, IOException {
- xgen.leaf(HTMLGen.H1).text("AAF 2.0 RESTful interface").end()
- .hr();
- xgen.leaf(HTMLGen.H2).text("Accessing RESTful").end();
- xgen.incr(HTMLGen.UL)
- .leaf(HTMLGen.LI).text("AAF RESTful service is secured by the following:").end()
- .incr(HTMLGen.UL)
- .leaf(HTMLGen.LI).text("The Client must utilize HTTP/S. Non Secure HTTP is not acceptable").end()
- .leaf(HTMLGen.LI).text("The Client MUST supply an Identity validated by one of the following mechanisms").end()
- .incr(HTMLGen.UL)
- .leaf(HTMLGen.LI).text("(Near Future) Application level Certificate").end()
- .end()
- .end()
- .leaf(HTMLGen.LI).text("Responses").end()
- .incr(HTMLGen.UL)
- .leaf(HTMLGen.LI).text("Each API Entity listed shows what structure will be accepted by service (ContentType) "
- + "or responded with by service (Accept). Therefore, use these in making your call. Critical for PUT/POST.").end()
- .leaf(HTMLGen.LI).text("Each API call may respond with JSON or XML. Choose the ContentType/Accept that has "
- + "+json after the type for JSON or +xml after the Type for XML").end()
- .leaf(HTMLGen.LI).text("XSDs for Versions").end()
- .incr(HTMLGen.UL)
- .leaf(HTMLGen.LI).leaf(HTMLGen.A,"href=../theme/aaf_2_0.xsd").text("API 2.0").end().end()
- .end()
- .leaf(HTMLGen.LI).text("AAF can support multiple Versions of the API. Choose the ContentType/Accept that has "
- + "the appropriate version=?.?").end()
- .leaf(HTMLGen.LI).text("All Errors coming from AAF return AT&T Standard Error Message as a String: " + ERROR_LINK
- + " (does not apply to errors from Container)").end()
- .end()
- .leaf(HTMLGen.LI).text("Character Restrictions").end()
- .incr(HTMLGen.UL)
- .leaf(HTMLGen.LI).text("Character Restrictions must depend on the Enforcement Point used").end()
- .leaf(HTMLGen.LI).text("Most AAF usage will be AAF Enforcement Point Characters for Instance and Action are:")
- .br().br().leaf(I).text("a-zA-Z0-9,.()_-=%").end()
- .br().br().text("For Instance, you may declare a multi-dimensional key with : (colon) separator, example:").end()
- .br().leaf(I).text(":myCluster:myKeyspace").end()
- .br().br().text("The * (asterix) may be used as a wild-card by itself or within the multi-dimensional key, example:")
- .br().leaf(I).text(":myCluster:*").end()
- .br().br().text("The % (percent) character can be used as an Escape Character. Applications can use % followed by 2 hexadecimal "
- + "digits to cover odd keys. It is their code, however, which must translate.")
- .br().br().text("The = (equals) is allowed so that Applications can pass Base64 encodations of binary keys").end()
- .leaf(HTMLGen.LI).text("Ask for a Consultation on how these are typically used, or, if your tool is the only Enforcement Point, if set may be expanded").end()
- .end()
- .end();
- /*
-
- The Content is defined in the AAF XSD - TODO Add aaf.xsd�;
- Character Restrictions
-
- URLs impose restrictions on characters which have specific meanings. This means you cannot have these characters in the Field Content you send
- “#� is a “Fragment URL�, or anchor. Content after this Character is not sent. AAF cannot do anything about this… don’t use it.
- “?=&�. These are used to delineate Parameters.
- “/“ is used to separate fields
- */
- }
-
- };
- /**
- * Implement the Table Content for Permissions by User
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- public static final String[] HEADERS = new String[] {"Entity","Method","Path Info","Description"};
- private static final TextCell BLANK = new TextCell("");
-
- @Override
- public String[] headers() {
- return HEADERS;
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- ArrayList<AbsCell[]> ns = new ArrayList<AbsCell[]>();
- ArrayList<AbsCell[]> perms = new ArrayList<AbsCell[]>();
- ArrayList<AbsCell[]> roles = new ArrayList<AbsCell[]>();
- ArrayList<AbsCell[]> user = new ArrayList<AbsCell[]>();
- ArrayList<AbsCell[]> aafOnly = new ArrayList<AbsCell[]>();
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
-
-
- TimeTaken tt = trans.start("AAF APIs",Env.REMOTE);
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- Future<Api> fa = client.read("/api",gui.apiDF);
- if(fa.get(5000)) {
- tt.done();
- TimeTaken tt2 = trans.start("Load Data", Env.SUB);
- try {
- if(fa.value!=null)for(Route r : fa.value.getRoute()) {
- String path = r.getPath();
- // Build info
- StringBuilder desc = new StringBuilder();
-
- desc.append("<p class=double>");
- desc.append(r.getDesc());
-
- if(r.getComments().size()>0) {
- for(String ct : r.getComments()) {
- desc.append("</p><p class=api_comment>");
- desc.append(ct);
- }
- }
-
- if(r.getParam().size()>0) {
- desc.append("<hr><p class=api_label>Parameters</p>");
-
- for(String params : r.getParam()) {
- String param[] = params.split("\\s*\\|\\s*");
- desc.append("</p><p class=api_contentType>");
- desc.append(param[0]);
- desc.append(" : ");
- desc.append(param[1]);
- if("true".equalsIgnoreCase(param[2])) {
- desc.append(" (Required)");
- }
- }
- }
-
-
- if(r.getExpected()!=0) {
- desc.append("</p><p class=api_label>Expected HTTP Code</p><p class=api_comment>");
- desc.append(r.getExpected());
- }
-
- if(r.getExplicitErr().size()!=0) {
- desc.append("</p><p class=api_label>Explicit HTTP Error Codes</p><p class=api_comment>");
- boolean first = true;
- for(int ee : r.getExplicitErr()) {
- if(first) {
- first = false;
- } else {
- desc.append(", ");
- }
- desc.append(ee);
- }
- }
-
- desc.append("</p><p class=api_label>");
- desc.append("GET".equals(r.getMeth())?"Accept:":"ContentType:");
- Collections.sort(r.getContentType());
- if(r.getPath().startsWith("/authn/basicAuth")) {
- desc.append("</p><p class=api_contentType>text/plain");
- }
- for(String ct : r.getContentType()) {
- if(ct.contains("version=2")) {
- desc.append("</p><p class=api_contentType><a href=\"./example/");
- try {
- desc.append(Symm.base64noSplit.encode(ct));
- } catch (IOException e) {
- throw new CadiException(e);
- }
- desc.append("\"/>");
- desc.append(ct);
- desc.append("</a>");
- }
- }
- desc.append("</p>");
-
-
- AbsCell[] sa = new AbsCell[] {
- null,
- new TextCell(r.getMeth(),"class=right"),
- new TextCell(r.getPath()),
- new TextCell(desc.toString()),
- };
-
- if(path.startsWith("/authz/perm")) {
- sa[0] = perms.size()==0?new TextCell("PERMISSION"):BLANK;
- perms.add(sa);
- } else if(path.startsWith("/authz/role") || path.startsWith("/authz/userRole")) {
- sa[0] = roles.size()==0?new TextCell("ROLE"):BLANK;
- roles.add(sa);
- } else if(path.startsWith("/authz/ns")) {
- sa[0] = ns.size()==0?new TextCell("NAMESPACE"):BLANK;
- ns.add(sa);
- } else if(path.startsWith("/authn/basicAuth")
- || path.startsWith("/authn/validate")
- || path.startsWith("/authz/user")) {
- sa[0] = user.size()==0?new TextCell("USER"):BLANK;
- user.add(sa);
- } else {
- sa[0] = aafOnly.size()==0?new TextCell("AAF ONLY"):BLANK;
- aafOnly.add(sa);
- }
- }
- //TODO if(trans.fish(p))
- prepare(rv, perms,roles,ns,user);
- } finally {
- tt2.done();
- }
- } else {
- gui.writeError(trans, fa, null);
- }
- return null;
- }
- });
- } catch (Exception e) {
- trans.error().log(e.getMessage());
- } finally {
- tt.done();
- }
-
- return new Cells(rv,null);
- }
-
- @SuppressWarnings("unchecked")
- private void prepare(ArrayList<AbsCell[]> rv, ArrayList<AbsCell[]> ... all) {
- AbsCell lead;
- AbsCell[] row;
- for(ArrayList<AbsCell[]> al : all) {
- if(al.size()>1) {
- row = al.get(0);
- lead = row[0];
- row[0]=BLANK;
- al.get(0).clone()[0]=BLANK;
- Collections.sort(al, new Comparator<AbsCell[]>() {
- @Override
- public int compare(AbsCell[] ca1, AbsCell[] ca2) {
- int meth = ((TextCell)ca1[2]).name.compareTo(
- ((TextCell)ca2[2]).name);
- if(meth == 0) {
- return (HttpMethods.valueOf(((TextCell)ca1[1]).name).compareTo(
- HttpMethods.valueOf(((TextCell)ca2[1]).name)));
- } else {
- return meth;
- }
- }
- });
- // set new first row
- al.get(0)[0]=lead;
-
- rv.addAll(al);
- }
- }
- }
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.Symm;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Data.TYPE;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.Error;
-
-/**
- * Detail Page for Permissions
- *
- *
- */
-public class ApiExample extends Page {
- public static final String HREF = "/gui/example/:tc";
- public static final String NAME = "APIExample";
-
- public ApiExample(final AuthGUI gui, Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, NAME, HREF, 2/*backdots*/, new String[] {"API Code Example"},
- new BreadCrumbs(breadcrumbs),
- new Model()
- );
- }
-
- private static class Model extends NamedCode {
- private static final String WITH_OPTIONAL_PARAMETERS = "\n\n////////////\n Data with Optional Parameters \n////////////\n\n";
-
- public Model() {
- super(false);
- }
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen xgen) throws APIException, IOException {
- Mark inner = xgen.divID("inner");
- xgen.divID("example","class=std");
- cache.dynamic(xgen, new DynamicCode<HTMLGen,AuthGUI,AuthzTrans>() {
- @Override
- public void code(final AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen xgen) throws APIException, IOException {
- TimeTaken tt = trans.start("Code Example",Env.REMOTE);
- try {
- final String typecode;
- int prefix = trans.path().lastIndexOf('/')+1;
- String encoded = trans.path().substring(prefix);
- typecode = Symm.base64noSplit.decode(encoded);
- Future<String> fp = gui.client().read("/api/example/" + encoded,
- "application/Void+json"
- );
- Future<String> fs2;
- if(typecode.contains("Request+")) {
- fs2 = gui.client().read("/api/example/" + typecode+"?optional=true",
- "application/Void+json"
- );
- } else {
- fs2=null;
- }
-
-
- if(fp.get(5000)) {
- xgen.incr(HTMLGen.H1).text("Sample Code").end()
- .incr(HTMLGen.H5).text(typecode).end();
- xgen.incr("pre");
- if(typecode.contains("+xml")) {
- xgen.xml(fp.body());
- if(fs2!=null && fs2.get(5000)) {
- xgen.text(WITH_OPTIONAL_PARAMETERS);
- xgen.xml(fs2.body());
- }
- } else {
- xgen.text(fp.body());
- if(fs2!=null && fs2.get(5000)) {
- xgen.text(WITH_OPTIONAL_PARAMETERS);
- xgen.text(fs2.body());
- }
- }
- xgen.end();
- } else {
- Error err = gui.errDF.newData().in(TYPE.JSON).load(fp.body()).asObject();
- xgen.incr(HTMLGen.H3)
- .textCR(2,"Error from AAF Service")
- .end();
-
- xgen.p("Error Code: ",err.getMessageId())
- .p(err.getText())
- .end();
-
- }
-
- } catch (APIException e) {
- throw e;
- } catch (IOException e) {
- throw e;
- } catch (Exception e) {
- throw new APIException(e);
- }finally {
- tt.done();
- }
- }
-
- });
- xgen.end(inner);
- }
- }
-
-}
-
\ No newline at end of file
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.Approval;
-import aaf.v2_0.Approvals;
-
-public class ApprovalAction extends Page {
- public ApprovalAction(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,"Approvals",ApprovalForm.HREF, ApprovalForm.FIELDS,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true,"content") {
- final Slot sAppr = gui.env.slot(ApprovalForm.NAME+'.'+ApprovalForm.FIELDS[0]);
- final Slot sUser = gui.env.slot(ApprovalForm.NAME+'.'+ApprovalForm.FIELDS[1]);
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- cache.dynamic(hgen, new DynamicCode<HTMLGen,AuthGUI, AuthzTrans>() {
- @Override
- public void code(final AuthGUI gui, final AuthzTrans trans,Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- boolean fail = true;
- String[] appr = trans.get(sAppr,null);
- String user = trans.get(sUser,null);
- String lastPage = ApprovalForm.HREF;
- if (user != null) {
- lastPage += "?user="+user;
- }
-
- if(appr==null) {
- hgen.p("No Approvals have been selected.");
- } else {
- Approval app;
- final Approvals apps = new Approvals();
- int count = 0;
- for(String a : appr) {
- if(a!=null) {
- int idx = a.indexOf('|');
- if(idx>=0) {
- app = new Approval();
- app.setStatus(a.substring(0,idx));
- app.setTicket(a.substring(++idx));
- app.setApprover(trans.getUserPrincipal().getName());
- apps.getApprovals().add(app);
- ++count;
- }
- }
- }
- if(apps.getApprovals().isEmpty()) {
- hgen.p("No Approvals have been sent.");
- } else {
- TimeTaken tt = trans.start("AAF Update Approvals",Env.REMOTE);
- try {
- final int total = count;
- fail = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Boolean>() {
- @Override
- public Boolean code(Rcli<?> client) throws APIException, CadiException {
- boolean fail2 = true;
- Future<Approvals> fa = client.update("/authz/approval",gui.approvalsDF,apps);
- if(fa.get(AuthGUI.TIMEOUT)) {
- // Do Remote Call
- fail2 = false;
- hgen.p(total + (total==1?" Approval has":" Approvals have") + " been Saved");
- } else {
- gui.writeError(trans, fa, hgen);
- }
- return fail2;
- }
- });
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- tt.done();
- }
- }
-
- hgen.br();
- if(fail) {
- hgen.incr("a",true,"href="+lastPage).text("Try again").end();
- } else {
- hgen.incr("a",true,"href="+Home.HREF).text("Home").end();
- }
- }
- }
- });
- }
- });
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.Form;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.ButtonCell;
-import com.att.authz.gui.table.RadioCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextAndRefCell;
-import com.att.authz.gui.table.TextCell;
-import com.att.authz.org.Organization;
-import com.att.authz.org.Organization.Identity;
-import com.att.authz.org.OrganizationFactory;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.Approval;
-
-public class ApprovalForm extends Page {
- // Package on purpose
- static final String NAME="Approvals";
- static final String HREF = "/gui/approve";
- static final String WEBPHONE = "http://webphone.att.com/cgi-bin/webphones.pl?id=";
- static final String[] FIELDS = new String[] {"line[]","user"};
-
-
- public ApprovalForm(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME,HREF, FIELDS,
-
- new BreadCrumbs(breadcrumbs),
- new NamedCode(false, "filterByUser") {
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- String user = trans.get(trans.env().slot(NAME+".user"),"");
- hgen.incr("p", "class=userFilter")
- .text("Filter by User:")
- .tagOnly("input", "type=text", "value="+user, "id=userTextBox")
- .tagOnly("input", "type=button", "onclick=userFilter('"+HREF+"');", "value=Go!")
- .end();
- }
- });
- }
- },
- new Form(true,new Table<AuthGUI,AuthzTrans>("Approval Requests", gui.env.newTransNoAvg(),new Model(gui.env()),"class=stdform"))
- .preamble("The following requires your Approval to proceed in the AAF System.</p><p class=subtext>Hover on Identity for Name; click for WebPhone"),
- new NamedCode(false, "selectAlljs") {
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- Mark jsStart = new Mark();
- hgen.js(jsStart);
- hgen.text("function selectAll(radioClass) {");
- hgen.text("var radios = document.querySelectorAll(\".\"+radioClass);");
- hgen.text("for (i = 0; i < radios.length; i++) {");
- hgen.text("radios[i].checked = true;");
- hgen.text("}");
- hgen.text("}");
- hgen.end(jsStart);
- }
- });
-
- }
-
- /**
- * Implement the Table Content for Approvals
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String[] headers = new String[] {"Identity","Request","Approve","Deny"};
- private static final Object THE_DOMAIN = null;
- private Slot sUser;
-
- public Model(AuthzEnv env) {
- sUser = env.slot(NAME+".user");
- }
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- final String userParam = trans.get(sUser, null);
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- String msg = null;
- TimeTaken tt = trans.start("AAF Get Approvals for Approver",Env.REMOTE);
- try {
- final List<Approval> pendingApprovals = new ArrayList<Approval>();
- final List<Integer> beginIndicesPerApprover = new ArrayList<Integer>();
- int numLeft = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Integer>() {
- @Override
- public Integer code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- Future<aaf.v2_0.Approvals> fa = client.read("/authz/approval/approver/"+trans.user(),gui.approvalsDF);
- int numLeft = 0;
- if(fa.get(AuthGUI.TIMEOUT)) {
-
- if(fa.value!=null) {
- for (Approval appr : fa.value.getApprovals()) {
- if (appr.getStatus().equals("pending")) {
- if (userParam!=null) {
- if (!appr.getUser().equalsIgnoreCase(userParam)) {
- numLeft++;
- continue;
- }
- }
- pendingApprovals.add(appr);
- }
- }
- }
-
- String prevApprover = null;
- int overallIndex = 0;
-
- for (Approval appr : pendingApprovals) {
- String currApprover = appr.getApprover();
- if (!currApprover.equals(prevApprover)) {
- prevApprover = currApprover;
- beginIndicesPerApprover.add(overallIndex);
- }
- overallIndex++;
- }
- }
- return numLeft;
- }
- });
-
- if (pendingApprovals.size() > 0) {
- // Only add select all links if we have approvals
- AbsCell[] selectAllRow = new AbsCell[] {
- AbsCell.Null,
- AbsCell.Null,
- new ButtonCell("all", "onclick=selectAll('approve')", "class=selectAllButton"),
- new ButtonCell("all", "onclick=selectAll('deny')", "class=selectAllButton")
- };
- rv.add(selectAllRow);
- }
-
- int line=-1;
-
- while (beginIndicesPerApprover.size() > 0) {
- int beginIndex = beginIndicesPerApprover.remove(0);
- int endIndex = (beginIndicesPerApprover.isEmpty()?pendingApprovals.size():beginIndicesPerApprover.get(0));
- List<Approval> currApproverList = pendingApprovals.subList(beginIndex, endIndex);
-
- String currApproverFull = currApproverList.get(0).getApprover();
- String currApproverShort = currApproverFull.substring(0,currApproverFull.indexOf('@'));
- String currApprover = (trans.user().indexOf('@')<0?currApproverShort:currApproverFull);
- if (!currApprover.equals(trans.user())) {
- AbsCell[] approverHeader;
- if (currApproverFull.substring(currApproverFull.indexOf('@')).equals(THE_DOMAIN)) {
- approverHeader = new AbsCell[] {
- new TextAndRefCell("Approvals Delegated to Me by ", currApprover,
- WEBPHONE + currApproverShort,
- new String[] {"colspan=4", "class=head"})
- };
- } else {
- approverHeader = new AbsCell[] {
- new TextCell("Approvals Delegated to Me by " + currApprover,
- new String[] {"colspan=4", "class=head"})
- };
- }
- rv.add(approverHeader);
- }
-
- // Sort by User Requesting
- Collections.sort(currApproverList, new Comparator<Approval>() {
- @Override
- public int compare(Approval a1, Approval a2) {
- return a1.getUser().compareTo(a2.getUser());
- }
- });
-
- String prevUser = null;
- for (Approval appr : currApproverList) {
- if(++line<MAX_LINE) { // limit number displayed at one time.
- AbsCell userCell;
- String user = appr.getUser();
- if(user.equals(prevUser)) {
- userCell = AbsCell.Null;
- } else {
- String title;
- Organization org = OrganizationFactory.obtain(trans.env(), user);
- if(org==null) {
- title="";
- } else {
- Identity au = org.getIdentity(trans, user);
- if(au!=null) {
- if(au.type().equals("MECHID")) {
- title="title=Sponsor is " + au.responsibleTo();
- } else {
- title="title=" + au.fullName();
- }
- } else {
- title="";
- }
- }
- userCell = new RefCell(prevUser=user,
- "" //TODO add Organization Link ability
- ,title);
- }
- AbsCell[] sa = new AbsCell[] {
- userCell,
- new TextCell(appr.getMemo()),
- new RadioCell("line"+ line,"approve", "approved|"+appr.getTicket()),
- new RadioCell("line"+ line,"deny", "denied|"+appr.getTicket())
- };
- rv.add(sa);
- } else {
- ++numLeft;
- }
- }
- }
- if(numLeft>0) {
- msg = "After these, there will be " + numLeft + " approvals left to process";
- }
- if(rv.size()==0) {
- if (numLeft>0) {
- msg = "No Approvals to process at this time for user " + userParam +". You have "
- + numLeft + " other approvals to process.";
- } else {
- msg = "No Approvals to process at this time";
- }
- }
- } catch (Exception e) {
- trans.error().log(e);
- } finally {
- tt.done();
- }
- return new Cells(rv,msg);
- }
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import static com.att.xgen.html.HTMLGen.A;
-import static com.att.xgen.html.HTMLGen.H3;
-
-import java.io.IOException;
-
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.inno.env.APIException;
-import com.att.xgen.Cache;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-
-
-public class Home extends Page {
- public static final String HREF = "/gui/home";
- public Home(final AuthGUI gui) throws APIException, IOException {
- super(gui.env,"Home",HREF, NO_FIELDS, new NamedCode(false,"content") {
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen xgen) throws APIException, IOException {
-// // TEMP
-// JSGen jsg = xgen.js();
-// jsg.function("httpPost","sURL","sParam")
-// .text("var oURL = new java.net.URL(sURL)")
-// .text("var oConn = oURL.openConnection();")
-// .text("oConn.setDoInput(true);")
-// .text("oConn.setDoOutpu(true);")
-// .text("oConn.setUseCaches(false);")
-// .text("oConn.setRequestProperty(\"Content-Type\",\"application/x-www-form-urlencoded\");")
-// .text(text)
-// jsg.done();
- // TEMP
- final Mark pages = xgen.divID("Pages");
- xgen.leaf(H3).text("Choose from the following:").end()
- .leaf(A,"href=myperms").text("My Permissions").end()
- .leaf(A,"href=myroles").text("My Roles").end()
- // TODO: uncomment when on cassandra 2.1.2 for MyNamespace GUI page
- .leaf(A,"href=mynamespaces").text("My Namespaces").end()
- .leaf(A,"href=approve").text("My Approvals").end()
- .leaf(A, "href=myrequests").text("My Pending Requests").end()
- // Enable later
-// .leaf(A, "href=onboard").text("Onboarding").end()
- // Password Change. If logged in as CSP/GSO, go to their page
- .leaf(A,"href=passwd").text("Password Management").end()
- .leaf(A,"href=cui").text("Command Prompt").end()
- .leaf(A,"href=api").text("AAF API").end()
- ;
-
- xgen.end(pages);
- }
- });
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.URLDecoder;
-
-import javax.servlet.http.HttpServletRequest;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.config.Config;
-import org.onap.aaf.inno.env.APIException;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-
-public class LoginLanding extends Page {
- public static final String HREF = "/login";
- static final String NAME = "Login";
- static final String fields[] = {"id","password","environment"};
- static final String envs[] = {"DEV","TEST","PROD"};
-
- public LoginLanding(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, NAME,HREF, fields, new NamedCode(true, "content") {
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- hgen.leaf("p").text("No login credentials are found in your current session. " +
- "Choose your preferred login option to continue.").end();
-
- Mark loginPaths = hgen.divID("Pages");
-
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI authGUI, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen xgen) throws APIException, IOException {
- HttpServletRequest req = trans.get(gui.slot_httpServletRequest, null);
- if(req!=null) {
- String query = req.getQueryString();
- if(query!=null) {
- for(String qs : query.split("&")) {
- int equals = qs.indexOf('=');
- xgen.leaf(HTMLGen.A, "href="+URLDecoder.decode(qs.substring(equals+1),Config.UTF_8)).text(qs.substring(0,equals).replace('_', ' ')).end();
- }
- }
- }
- xgen.leaf(HTMLGen.A, "href=gui/home?Authentication=BasicAuth").text("AAF Basic Auth").end();
- }
- });
-// hgen.leaf("a", "href=#","onclick=divVisibility('cso');").text("Global Login").end()
-// .incr("p", "id=cso","style=display:none").text("this will redirect to global login").end()
-// .leaf("a", "href=#","onclick=divVisibility('tguard');").text("tGuard").end()
-// .incr("p", "id=tguard","style=display:none").text("this will redirect to tGuard login").end()
-// hgen.leaf("a", "href=#","onclick=divVisibility('basicauth');").text("AAF Basic Auth").end();
- hgen.end(loginPaths);
-
-// hgen.incr("form","method=post","style=display:none","id=basicauth","gui/home?Authentication=BasicAuth");
-// Mark table = new Mark(TABLE);
-// hgen.incr(table);
-// cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
-// @Override
-// public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen)
-// throws APIException, IOException {
-// hgen
-// .input(fields[0],"Username",true)
-// .input(fields[1],"Password",true, "type=password");
-// Mark selectRow = new Mark();
-// hgen
-// .incr(selectRow, "tr")
-// .incr("td")
-// .incr("label", "for=envs", "required").text("Environment").end()
-// .end()
-// .incr("td")
-// .incr("select", "name=envs", "id=envs", "required")
-// .incr("option", "value=").text("Select Environment").end();
-// for (String env : envs) {
-// hgen.incr("option", "value="+env).text(env).end();
-// }
-// hgen
-// .end(selectRow)
-
-// hgen.end();
-// }
-// });
-// hgen.end();
-// hgen.tagOnly("input", "type=submit", "value=Submit")
-// .tagOnly("input", "type=reset", "value=Reset")
-// .end();
-
-
- }
- });
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Slot;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-public class LoginLandingAction extends Page {
- public LoginLandingAction(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,"Login",LoginLanding.HREF, LoginLanding.fields,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true,"content") {
- final Slot sID = gui.env.slot(LoginLanding.NAME+'.'+LoginLanding.fields[0]);
-// final Slot sPassword = gui.env.slot(LoginLanding.NAME+'.'+LoginLanding.fields[1]);
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- cache.dynamic(hgen, new DynamicCode<HTMLGen,AuthGUI, AuthzTrans>() {
- @Override
- public void code(final AuthGUI gui, final AuthzTrans trans,Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- String username = trans.get(sID,null);
-// String password = trans.get(sPassword,null);
-
- hgen.p("User: "+username);
- hgen.p("Pass: ********");
-
- // TODO: clarification from JG
- // put in request header?
- // then pass through authn/basicAuth call?
-
- }
- });
- }
- });
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-import java.util.List;
-
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import com.att.cmd.AAFcli;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import org.onap.aaf.inno.env.util.Chrono;
-
-import aaf.v2_0.Nss;
-import aaf.v2_0.Nss.Ns;
-import aaf.v2_0.Perm;
-import aaf.v2_0.Perms;
-import aaf.v2_0.Role;
-import aaf.v2_0.Roles;
-import aaf.v2_0.Users;
-import aaf.v2_0.Users.User;
-
-public class NsDetail extends Page {
-
- public static final String HREF = "/gui/nsdetail";
- public static final String NAME = "NsDetail";
- static final String WEBPHONE = "http://webphone.att.com/cgi-bin/webphones.pl?id=";
- public static enum NS_FIELD { OWNERS, ADMINS, ROLES, PERMISSIONS, CREDS};
- private static final String BLANK = "";
-
- public NsDetail(final AuthGUI gui, Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, NAME, HREF, new String[] {"name"},
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("Namespace Details",gui.env.newTransNoAvg(),new Model(gui.env()),"class=detail")
- );
- }
-
- /**
- * Implement the table content for Namespace Detail
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String[] headers = new String[0];
- private static final String CSP_ATT_COM = "@csp.att.com";
- private Slot name;
- public Model(AuthzEnv env) {
- name = env.slot(NAME+".name");
- }
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- final String nsName = trans.get(name, null);
- if(nsName==null) {
- return Cells.EMPTY;
- }
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- rv.add(new AbsCell[]{new TextCell("Name:"),new TextCell(nsName)});
-
- final TimeTaken tt = trans.start("AAF Namespace Details",Env.REMOTE);
- try {
- gui.clientAsUser(trans.getUserPrincipal(),new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- Future<Nss> fn = client.read("/authz/nss/"+nsName,gui.nssDF);
-
- if(fn.get(AuthGUI.TIMEOUT)) {
- tt.done();
- try {
-// TimeTaken tt = trans.start("Load Data", Env.SUB);
-
- for(Ns n : fn.value.getNs()) {
- String desc = (n.getDescription()!=null?n.getDescription():BLANK);
- rv.add(new AbsCell[]{new TextCell("Description:"),new TextCell(desc)});
-
- addField(trans, rv, n.getAdmin(), NS_FIELD.ADMINS);
- addField(trans, rv, n.getResponsible(), NS_FIELD.OWNERS);
-
- Future<Users> fu = client.read(
- "/authn/creds/ns/"+nsName,
- gui.usersDF
- );
- List<String> creds = new ArrayList<String>();
- if(fu.get(AAFcli.timeout())) {
- for (User u : fu.value.getUser()) {
- StringBuilder sb = new StringBuilder(u.getId());
- switch(u.getType()) {
- case 1: sb.append(" (U/Pass) "); break;
- case 10: sb.append(" (Cert) "); break;
- case 200: sb.append(" (x509) "); break;
- default:
- sb.append(" ");
- }
- sb.append(Chrono.niceDateStamp(u.getExpires()));
- creds.add(sb.toString());
- }
- }
- addField(trans, rv, creds, NS_FIELD.CREDS);
-
- Future<Roles> fr = client.read(
- "/authz/roles/ns/"+nsName,
- gui.rolesDF
- );
- List<String> roles = new ArrayList<String>();
- if(fr.get(AAFcli.timeout())) {
- for (Role r : fr.value.getRole()) {
- roles.add(r.getName());
- }
- }
- addField(trans, rv, roles, NS_FIELD.ROLES);
-
-
- Future<Perms> fp = client.read(
- "/authz/perms/ns/"+nsName,
- gui.permsDF
- );
- List<String> perms = new ArrayList<String>();
-
- if(fp.get(AAFcli.timeout())) {
- for (Perm p : fp.value.getPerm()) {
- perms.add(p.getType() + "|" + p.getInstance() + "|" + p.getAction());
- }
- }
- addField(trans, rv, perms, NS_FIELD.PERMISSIONS);
- }
- String historyLink = NsHistory.HREF
- + "?name=" + nsName;
- rv.add(new AbsCell[] {new RefCell("See History",historyLink)});
- } finally {
- tt.done();
- }
- } else {
- rv.add(new AbsCell[] {new TextCell("*** Data Unavailable ***")});
- }
- return null;
- }
- });
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- tt.done();
- }
- return new Cells(rv,null);
- }
-
- private void addField(AuthzTrans trans, ArrayList<AbsCell[]> rv, List<String> values, NS_FIELD field) {
- if (!values.isEmpty()) {
- switch(field) {
- case OWNERS:
- case ADMINS:
- case CREDS:
- for (int i=0; i< values.size(); i++) {
- AbsCell label = (i==0?new TextCell(sentenceCase(field)+":"):AbsCell.Null);
- String user = values.get(i);
- AbsCell userCell = (user.endsWith(CSP_ATT_COM)?
- new RefCell(user,WEBPHONE + user.substring(0,user.indexOf('@'))):new TextCell(user));
- rv.add(new AbsCell[] {
- label,
- userCell
- });
- }
- break;
- case ROLES:
- for (int i=0; i< values.size(); i++) {
- AbsCell label = (i==0?new TextCell(sentenceCase(field)+":"):AbsCell.Null);
- rv.add(new AbsCell[] {
- label,
- new TextCell(values.get(i))
- });
- }
- break;
- case PERMISSIONS:
- for (int i=0; i< values.size(); i++) {
- AbsCell label = (i==0?new TextCell(sentenceCase(field)+":"):AbsCell.Null);
- String perm = values.get(i);
- String[] fields = perm.split("\\|");
- String grantLink = PermGrantForm.HREF
- + "?type=" + fields[0].trim()
- + "&instance=" + fields[1].trim()
- + "&action=" + fields[2].trim();
-
- rv.add(new AbsCell[] {
- label,
- new TextCell(perm),
- new RefCell("Grant This Perm", grantLink)
- });
- }
- break;
- }
-
- }
- }
-
- private String sentenceCase(NS_FIELD field) {
- String sField = field.toString();
- return sField.substring(0, 1).toUpperCase() + sField.substring(1).toLowerCase();
- }
-
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Comparator;
-import java.util.List;
-
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.History;
-import aaf.v2_0.History.Item;
-
-public class NsHistory extends Page {
- static final String NAME="NsHistory";
- static final String HREF = "/gui/nsHistory";
- static final String FIELDS[] = {"name","dates"};
- static final String WEBPHONE = "http://webphone.att.com/cgi-bin/webphones.pl?id=";
- static enum Month { JANUARY, FEBRUARY, MARCH, APRIL, MAY, JUNE, JULY,
- AUGUST, SEPTEMBER, OCTOBER, NOVEMBER, DECEMBER };
-
- public NsHistory(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME,HREF, FIELDS,
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("History", gui.env.newTransNoAvg(),new Model(gui.env()),"class=std"),
- new NamedCode(true, "content") {
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen hgen) throws APIException, IOException {
- final Slot name = gui.env.slot(NAME+".name");
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- String obName = trans.get(name, null);
-
- // Use Javascript to make the table title more descriptive
- hgen.js()
- .text("var caption = document.querySelector(\".title\");")
- .text("caption.innerHTML='History for Namespace [ " + obName + " ]';")
- .done();
-
- // Use Javascript to change Link Target to our last visited Detail page
- String lastPage = NsDetail.HREF + "?name=" + obName;
- hgen.js()
- .text("alterLink('nsdetail', '"+lastPage + "');")
- .done();
-
- hgen.br();
- hgen.leaf("a","href=#advanced_search","onclick=divVisibility('advanced_search');").text("Advanced Search").end()
- .divID("advanced_search", "style=display:none");
- hgen.incr("table");
-
- addDateRow(hgen,"Start Date");
- addDateRow(hgen,"End Date");
- hgen.incr("tr").incr("td");
- hgen.tagOnly("input", "type=button","value=Get History",
- "onclick=datesURL('"+HREF+"?name=" + obName+"');");
- hgen.end().end();
- hgen.end();
- hgen.end();
-
- }
- });
- }
- }
-
- );
- }
-
- private static void addDateRow(HTMLGen hgen, String s) {
- hgen
- .incr("tr")
- .incr("td")
- .incr("label", "for=month", "required").text(s+"*").end()
- .end()
- .incr("td")
- .incr("select", "name=month"+s.substring(0, s.indexOf(' ')), "id=month"+s.substring(0, s.indexOf(' ')), "required")
- .incr("option", "value=").text("Month").end();
- for (Month m : Month.values()) {
- if (Calendar.getInstance().get(Calendar.MONTH) == m.ordinal()) {
- hgen.incr("option", "selected", "value="+(m.ordinal()+1)).text(m.name()).end();
- } else {
- hgen.incr("option", "value="+(m.ordinal()+1)).text(m.name()).end();
- }
- }
- hgen.end()
- .end()
- .incr("td")
- .tagOnly("input","type=number","id=year"+s.substring(0, s.indexOf(' ')),"required",
- "value="+Calendar.getInstance().get(Calendar.YEAR), "min=1900",
- "max="+Calendar.getInstance().get(Calendar.YEAR),
- "placeholder=Year").end()
- .end();
- }
-
-
-
-
- /**
- * Implement the Table Content for History
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String CSP_ATT_COM = "@csp.att.com";
- private static final String[] headers = new String[] {"Date","User","Memo"};
- private Slot name;
- private Slot dates;
-
- public Model(AuthzEnv env) {
- name = env.slot(NAME+".name");
- dates = env.slot(NAME+".dates");
- }
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- final String oName = trans.get(name,null);
- final String oDates = trans.get(dates,null);
-
- if(oName==null) {
- return Cells.EMPTY;
- }
-
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- String msg = null;
- final TimeTaken tt = trans.start("AAF Get History for Namespace ["+oName+"]",Env.REMOTE);
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- if (oDates != null) {
- client.setQueryParams("yyyymm="+oDates);
- }
- Future<History> fh = client.read("/authz/hist/ns/"+oName,gui.historyDF);
- if (fh.get(AuthGUI.TIMEOUT)) {
- tt.done();
- TimeTaken tt2 = trans.start("Load History Data", Env.SUB);
- try {
- List<Item> histItems = fh.value.getItem();
-
- java.util.Collections.sort(histItems, new Comparator<Item>() {
- @Override
- public int compare(Item o1, Item o2) {
- return o2.getTimestamp().compare(o1.getTimestamp());
- }
- });
-
- for (Item i : histItems) {
- String user = i.getUser();
- AbsCell userCell = (user.endsWith(CSP_ATT_COM)?
- new RefCell(user,WEBPHONE + user.substring(0,user.indexOf('@'))):new TextCell(user));
-
- rv.add(new AbsCell[] {
- new TextCell(i.getTimestamp().toGregorianCalendar().getTime().toString()),
- userCell,
- new TextCell(i.getMemo())
- });
- }
- } finally {
- tt2.done();
- }
- } else {
- if (fh.code()==403) {
- rv.add(new AbsCell[] {new TextCell("You may not view History of Namespace [" + oName + "]", "colspan = 3", "class=center")});
- } else {
- rv.add(new AbsCell[] {new TextCell("*** Data Unavailable ***", "colspan = 3", "class=center")});
- }
- }
- return null;
- }
- });
- } catch (Exception e) {
- trans.error().log(e);
- } finally {
- tt.done();
- }
- return new Cells(rv,msg);
- }
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.text.ParseException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import org.onap.aaf.inno.env.util.Chrono;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.CredRequest;
-
-public class NsInfoAction extends Page {
- public NsInfoAction(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,"Onboard",PassChangeForm.HREF, PassChangeForm.fields,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true,"content") {
- final Slot sID = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[0]);
- final Slot sCurrPass = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[1]);
- final Slot sPassword = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[2]);
- final Slot sPassword2 = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[3]);
- final Slot startDate = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[4]);
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- cache.dynamic(hgen, new DynamicCode<HTMLGen,AuthGUI, AuthzTrans>() {
- @Override
- public void code(final AuthGUI gui, final AuthzTrans trans,Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- String id = trans.get(sID,null);
- String currPass = trans.get(sCurrPass,null);
- String password = trans.get(sPassword,null);
- String password2 = trans.get(sPassword2,null);
-
- // Run Validations
- boolean fail = true;
-
- if (id==null || id.indexOf('@')<=0) {
- hgen.p("Data Entry Failure: Please enter a valid ID, including domain.");
- } else if(password == null || password2 == null || currPass == null) {
- hgen.p("Data Entry Failure: Both Password Fields need entries.");
- } else if(!password.equals(password2)) {
- hgen.p("Data Entry Failure: Passwords do not match.");
- } else { // everything else is checked by Server
- final CredRequest cred = new CredRequest();
- cred.setId(id);
- cred.setPassword(currPass);
- try {
- fail = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Boolean>() {
- @Override
- public Boolean code(Rcli<?> client)throws CadiException, ConnectException, APIException {
- TimeTaken tt = trans.start("Check Current Password",Env.REMOTE);
- try {
- Future<CredRequest> fcr = client.create( // Note: Need "Post", because of hiding password in SSL Data
- "/authn/validate",
- gui.credReqDF,
- cred
- );
- boolean go;
- boolean fail = true;
- fcr.get(5000);
- if(fcr.code() == 200) {
- hgen.p("Current Password validated");
- go = true;
- } else {
- hgen.p(String.format("Invalid Current Password: %d %s",fcr.code(),fcr.body()));
- go = false;
- }
- if(go) {
- tt.done();
- tt = trans.start("AAF Change Password",Env.REMOTE);
- try {
- // Change over Cred to reset mode
- cred.setPassword(password);
- String start = trans.get(startDate, null);
- if(start!=null) {
- try {
- cred.setStart(Chrono.timeStamp(Chrono.dateOnlyFmt.parse(start)));
- } catch (ParseException e) {
- throw new CadiException(e);
- }
- }
-
- fcr = client.create(
- "/authn/cred",
- gui.credReqDF,
- cred
- );
-
- if(fcr.get(5000)) {
- // Do Remote Call
- hgen.p("New Password has been added.");
- fail = false;
- } else {
- gui.writeError(trans, fcr, hgen);
- }
- } finally {
- tt.done();
- }
- }
- return fail;
- } finally {
- tt.done();
- }
- }
- });
-
- } catch (Exception e) {
- hgen.p("Unknown Error");
- e.printStackTrace();
- }
- }
- hgen.br();
- if(fail) {
- hgen.incr("a",true,"href="+PassChangeForm.HREF+"?id="+id).text("Try again").end();
- } else {
- hgen.incr("a",true,"href="+Home.HREF).text("Home").end();
- }
- }
- });
- }
- });
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import static com.att.xgen.html.HTMLGen.A;
-import static com.att.xgen.html.HTMLGen.TABLE;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.List;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.Nss;
-import aaf.v2_0.Nss.Ns;
-import aaf.v2_0.Nss.Ns.Attrib;
-
-public class NsInfoForm extends Page {
- // Package on purpose
- static final String HREF = "/gui/onboard";
- static final String NAME = "Onboarding";
- static final String fields[] = {"ns","description","mots","owners","admins"};
-
- public NsInfoForm(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME,HREF, fields,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true,"content") {
-
- private final Slot sID = gui.env.slot(NsInfoForm.NAME+'.'+NsInfoForm.fields[0]);
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen hgen) throws APIException, IOException {
- // p tags not closing right using .p() - causes issues in IE8 password form - so using leaf for the moment
- hgen.leaf(HTMLGen.H2).text("Namespace Info").end()
- .leaf("p").text("Hover over Fields for Tool Tips, or click ")
- .leaf(A,"href="+gui.env.getProperty("aaf_url.gui_onboard","")).text("Here").end()
- .text(" for more information")
- .end()
- .incr("form","method=post");
- Mark table = new Mark(TABLE);
- hgen.incr(table);
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @SuppressWarnings("unchecked")
- @Override
- public void code(final AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- final String incomingID= trans.get(sID, "");
- final String[] info = new String[fields.length];
- final Object own_adm[] = new Object[2];
- for(int i=0;i<info.length;++i) {
- info[i]="";
- }
- if(incomingID.length()>0) {
- TimeTaken tt = trans.start("AAF Namespace Info",Env.REMOTE);
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- Future<Nss> fn = client.read("/authz/nss/"+incomingID,gui.nssDF);
- if(fn.get(AuthGUI.TIMEOUT)) {
- for(Ns ns : fn.value.getNs()) {
- info[0]=ns.getName();
- info[1]=ns.getDescription();
- for(Attrib attr: ns.getAttrib()) {
- switch(attr.getKey()) {
- case "mots":
- info[2]=attr.getValue();
- default:
- }
- }
- own_adm[0]=ns.getResponsible();
- own_adm[1]=ns.getAdmin();
- }
- } else {
- trans.error().log(fn.body());
- }
- return null;
- }
- });
- } catch (Exception e) {
- trans.error().log("Unable to access AAF for NS Info",incomingID);
- e.printStackTrace();
- } finally {
- tt.done();
- }
- }
- hgen.input(fields[0],"Namespace",false,"value="+info[0],"title=AAF Namespace")
- .input(fields[1],"Description*",true,"value="+info[1],"title=Full Application Name, Tool Name or Group")
- .input(fields[2],"MOTS ID",false,"value="+info[2],"title=MOTS ID if this is an Application, and has MOTS");
- Mark endTD = new Mark(),endTR=new Mark();
- // Owners
- hgen.incr(endTR,HTMLGen.TR)
- .incr(endTD,HTMLGen.TD)
- .leaf("label","for="+fields[3]).text("Responsible Party")
- .end(endTD)
- .incr(endTD,HTMLGen.TD)
- .tagOnly("input","id="+fields[3],"title=Owner of App, must be an Non-Bargained Employee");
- if(own_adm[0]!=null) {
- for(String s : (List<String>)own_adm[0]) {
- hgen.incr("label",true).text(s).end();
- }
- }
- hgen.end(endTR);
-
- // Admins
- hgen.incr(endTR,HTMLGen.TR)
- .incr(endTD,HTMLGen.TD)
- .leaf("label","for="+fields[4]).text("Administrators")
- .end(endTD)
- .incr(endTD,HTMLGen.TD)
- .tagOnly("input","id="+fields[4],"title=Admins may be employees, contractors or mechIDs");
- if(own_adm[1]!=null) {
- for(String s : (List<String>)own_adm[1]) {
- hgen.incr(HTMLGen.P,true).text(s).end();
- }
- }
- hgen.end(endTR)
- .end();
- }
- });
- hgen.end();
- hgen.tagOnly("input", "type=submit", "value=Submit")
- .end();
-
- }
- });
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-
-import aaf.v2_0.Nss;
-import aaf.v2_0.Nss.Ns;
-
-public class NssShow extends Page {
- public static final String HREF = "/gui/mynamespaces";
-
- public NssShow(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, "MyNamespaces",HREF, NO_FIELDS,
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("Namespaces I administer",gui.env.newTransNoAvg(),new Model("admin",gui.env),
- "class=std", "style=display: inline-block; width: 45%; margin: 10px;"),
- new Table<AuthGUI,AuthzTrans>("Namespaces I own",gui.env.newTransNoAvg(),new Model("responsible",gui.env),
- "class=std", "style=display: inline-block; width: 45%; margin: 10px;"));
- }
-
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private String[] headers;
- private String privilege = null;
- public final Slot sNssByUser;
- private boolean isAdmin;
-
- public Model(String privilege,AuthzEnv env) {
- super();
- headers = new String[] {privilege};
- this.privilege = privilege;
- isAdmin = "admin".equals(privilege);
- sNssByUser = env.slot("NSS_SHOW_MODEL_DATA");
- }
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- List<Ns> nss = trans.get(sNssByUser, null);
- if(nss==null) {
- TimeTaken tt = trans.start("AAF Nss by User for " + privilege,Env.REMOTE);
- try {
- nss = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<List<Ns>>() {
- @Override
- public List<Ns> code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- List<Ns> nss = null;
- Future<Nss> fp = client.read("/authz/nss/either/" + trans.user(),gui.nssDF);
- if(fp.get(AuthGUI.TIMEOUT)) {
- TimeTaken tt = trans.start("Load Data for " + privilege, Env.SUB);
- try {
- if(fp.value!=null) {
- nss = fp.value.getNs();
- Collections.sort(nss, new Comparator<Ns>() {
- public int compare(Ns ns1, Ns ns2) {
- return ns1.getName().compareToIgnoreCase(ns2.getName());
- }
- });
- trans.put(sNssByUser,nss);
- }
- } finally {
- tt.done();
- }
- }else {
- gui.writeError(trans, fp, null);
- }
- return nss;
- }
- });
- } catch (Exception e) {
- trans.error().log(e);
- } finally {
- tt.done();
- }
- }
-
- if(nss!=null) {
- for(Ns n : nss) {
- if((isAdmin && !n.getAdmin().isEmpty())
- || (!isAdmin && !n.getResponsible().isEmpty())) {
- AbsCell[] sa = new AbsCell[] {
- new RefCell(n.getName(),NsDetail.HREF
- +"?name="+n.getName()),
- };
- rv.add(sa);
- }
- }
- }
-
- return new Cells(rv,null);
- }
- }
-
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.text.ParseException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import org.onap.aaf.inno.env.util.Chrono;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.CredRequest;
-
-public class PassChangeAction extends Page {
- public PassChangeAction(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,"PassChange",PassChangeForm.HREF, PassChangeForm.fields,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true,"content") {
- final Slot sID = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[0]);
- final Slot sCurrPass = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[1]);
- final Slot sPassword = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[2]);
- final Slot sPassword2 = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[3]);
- final Slot startDate = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[4]);
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- cache.dynamic(hgen, new DynamicCode<HTMLGen,AuthGUI, AuthzTrans>() {
- @Override
- public void code(final AuthGUI gui, final AuthzTrans trans,Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- String id = trans.get(sID,null);
- String currPass = trans.get(sCurrPass,null);
- String password = trans.get(sPassword,null);
- String password2 = trans.get(sPassword2,null);
-
- // Run Validations
- boolean fail = true;
-
- if (id==null || id.indexOf('@')<=0) {
- hgen.p("Data Entry Failure: Please enter a valid ID, including domain.");
- } else if(password == null || password2 == null || currPass == null) {
- hgen.p("Data Entry Failure: Both Password Fields need entries.");
- } else if(!password.equals(password2)) {
- hgen.p("Data Entry Failure: Passwords do not match.");
- } else { // everything else is checked by Server
- final CredRequest cred = new CredRequest();
- cred.setId(id);
- cred.setPassword(currPass);
- try {
- fail = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Boolean>() {
- @Override
- public Boolean code(Rcli<?> client)throws CadiException, ConnectException, APIException {
- boolean fail = true;
- boolean go = false;
- TimeTaken tt = trans.start("Check Current Password",Env.REMOTE);
- try {
- Future<CredRequest> fcr = client.create( // Note: Need "Post", because of hiding password in SSL Data
- "/authn/validate",gui.credReqDF,cred);
-
- fcr.get(5000);
- if(fcr.code() == 200) {
- hgen.p("Current Password validated");
- go = true;
- } else {
- hgen.p(String.format("Invalid Current Password: %d %s",fcr.code(),fcr.body()));
- go = false;
- }
- } finally {
- tt.done();
- }
- if(go) {
- tt = trans.start("AAF Change Password",Env.REMOTE);
- try {
- // Change over Cred to reset mode
- cred.setPassword(password);
- String start = trans.get(startDate, null);
- if(start!=null) {
- try {
- cred.setStart(Chrono.timeStamp(Chrono.dateOnlyFmt.parse(start)));
- } catch (ParseException e) {
- throw new CadiException(e);
- }
- }
-
- Future<CredRequest> fcr = client.create(
- "/authn/cred",
- gui.credReqDF,
- cred
- );
-
- if(fcr.get(5000)) {
- // Do Remote Call
- hgen.p("New Password has been added.");
- fail = false;
- } else {
- gui.writeError(trans, fcr, hgen);
- }
- } finally {
- tt.done();
- }
- }
- return fail;
- }
-
- });
- } catch (Exception e) {
- hgen.p("Unknown Error");
- e.printStackTrace();
- }
-
- }
- hgen.br();
- if(fail) {
- hgen.incr("a",true,"href="+PassChangeForm.HREF+"?id="+id).text("Try again").end();
- } else {
- hgen.incr("a",true,"href="+Home.HREF).text("Home").end();
- }
- }
- });
- }
- });
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import static com.att.xgen.html.HTMLGen.TABLE;
-
-import java.io.IOException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Slot;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-
-public class PassChangeForm extends Page {
- // Package on purpose
- static final String HREF = "/gui/passwd";
- static final String NAME = "PassChange";
- static final String fields[] = {"id","current","password","password2","startDate"};
-
- public PassChangeForm(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME,HREF, fields,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true,"content") {
- private final Slot sID = gui.env.slot(PassChangeForm.NAME+'.'+PassChangeForm.fields[0]);
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen hgen) throws APIException, IOException {
- // p tags not closing right using .p() - causes issues in IE8 password form - so using leaf for the moment
- hgen.leaf("p").text("You are requesting a new Mechanical Password in the AAF System. " +
- "So that you can perform clean migrations, you will be able to use both this " +
- "new password and the old one until their respective expiration dates.").end()
- .leaf("p").text("Note: You must be a Namespace Admin where the MechID resides.").end()
- .incr("form","method=post");
- Mark table = new Mark(TABLE);
- hgen.incr(table);
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
-// GregorianCalendar gc = new GregorianCalendar();
-// System.out.println(gc.toString());
- String incomingID= trans.get(sID, "");
- hgen
- .input(fields[0],"ID*",true,"value="+incomingID)
- .input(fields[1],"Current Password*",true,"type=password")
- .input(fields[2],"New Password*",true, "type=password")
- .input(fields[3], "Reenter New Password*",true, "type=password")
-// .input(fields[3],"Start Date",false,"type=date", "value="+
-// Chrono.dateOnlyFmt.format(new Date(System.currentTimeMillis()))
-// )
- .end();
- }
- });
- hgen.end();
- hgen.tagOnly("input", "type=submit", "value=Submit")
- .end();
-
- }
- });
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.UUID;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.Approval;
-import aaf.v2_0.Approvals;
-
-public class PendingRequestsShow extends Page {
- public static final String HREF = "/gui/myrequests";
- public static final String NAME = "MyRequests";
- static final String WEBPHONE = "http://webphone.att.com/cgi-bin/webphones.pl?id=";
- private static final String DATE_TIME_FORMAT = "yyyy-MM-dd";
-
- public PendingRequestsShow(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, NAME,HREF, NO_FIELDS,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true,"expedite") {
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen hgen) throws APIException, IOException {
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- hgen
- .leaf("p", "class=expedite_request").text("These are your submitted Requests that are awaiting Approval. ")
- .br()
- .text("To Expedite a Request: ")
- .leaf("a","href=#expedite_directions","onclick=divVisibility('expedite_directions');")
- .text("Click Here").end()
- .divID("expedite_directions", "style=display:none");
- hgen
- .incr(HTMLGen.OL)
- .incr(HTMLGen.LI)
- .leaf("a","href="+ApprovalForm.HREF+"?user="+trans.user(), "id=userApprove")
- .text("Copy This Link")
- .end()
- .end()
- .incr(HTMLGen.LI)
- .text("Send it to the Approver Listed")
- .end()
- .end()
- .text("NOTE: Using this link, the Approver will only see your requests. You only need to send this link once!")
- .end()
- .end();
- }
- });
- }
- },
- new Table<AuthGUI,AuthzTrans>("Pending Requests",gui.env.newTransNoAvg(),new Model(), "class=std")
- );
-
-
- }
-
- /**
- * Implement the Table Content for Requests by User
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String CSP_ATT_COM = "@csp.att.com";
- final long NUM_100NS_INTERVALS_SINCE_UUID_EPOCH = 0x01b21dd213814000L;
- private static final String[] headers = new String[] {"Request Date","Status","Memo","Approver"};
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- DateFormat createdDF = new SimpleDateFormat(DATE_TIME_FORMAT);
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client)throws CadiException, ConnectException, APIException {
- TimeTaken tt = trans.start("AAF Get Approvals by User",Env.REMOTE);
- try {
- Future<Approvals> fa = client.read("/authz/approval/user/"+trans.user(),gui.approvalsDF);
- if(fa.get(5000)) {
- tt.done();
- tt = trans.start("Load Data", Env.SUB);
- if(fa.value!=null) {
- List<Approval> approvals = fa.value.getApprovals();
- Collections.sort(approvals, new Comparator<Approval>() {
- @Override
- public int compare(Approval a1, Approval a2) {
- UUID id1 = UUID.fromString(a1.getId());
- UUID id2 = UUID.fromString(a2.getId());
- return id1.timestamp()<=id2.timestamp()?1:-1;
- }
- });
-
- String prevTicket = null;
- for(Approval a : approvals) {
- String approver = a.getApprover();
- String approverShort = approver.substring(0,approver.indexOf('@'));
-
- AbsCell tsCell = null;
- String ticket = a.getTicket();
- if (ticket.equals(prevTicket)) {
- tsCell = AbsCell.Null;
- } else {
- UUID id = UUID.fromString(a.getId());
- tsCell = new RefCell(createdDF.format((id.timestamp() - NUM_100NS_INTERVALS_SINCE_UUID_EPOCH)/10000),
- RequestDetail.HREF + "?ticket=" + a.getTicket());
- prevTicket = ticket;
- }
-
- AbsCell approverCell = null;
- if (approver.endsWith(CSP_ATT_COM)) {
- approverCell = new RefCell(approver, WEBPHONE + approverShort);
- } else {
- approverCell = new TextCell(approver);
- }
- AbsCell[] sa = new AbsCell[] {
- tsCell,
- new TextCell(a.getStatus()),
- new TextCell(a.getMemo()),
- approverCell
- };
- rv.add(sa);
- }
- }
- } else {
- gui.writeError(trans, fa, null);
- }
- } finally {
- tt.done();
- }
-
-
- return null;
- }
- });
- } catch (Exception e) {
- trans.error().log(e);
- }
- return new Cells(rv,null);
- }
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-import java.util.List;
-
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-
-import aaf.v2_0.Perm;
-import aaf.v2_0.Perms;
-
-/**
- * Detail Page for Permissions
- *
- */
-public class PermDetail extends Page {
- public static final String HREF = "/gui/permdetail";
- public static final String NAME = "PermDetail";
- private static final String BLANK = "";
-
- public PermDetail(final AuthGUI gui, Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, NAME, HREF, new String[] {"type","instance","action"},
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("Permission Details",gui.env.newTransNoAvg(),new Model(gui.env()),"class=detail")
- );
- }
-
- /**
- * Implement the table content for Permissions Detail
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String[] headers = new String[0];
- private Slot type, instance, action;
- public Model(AuthzEnv env) {
- type = env.slot(NAME+".type");
- instance = env.slot(NAME+".instance");
- action = env.slot(NAME+".action");
- }
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- final String pType = trans.get(type, null);
- final String pInstance = trans.get(instance, null);
- final String pAction = trans.get(action, null);
- if(pType==null || pInstance==null || pAction==null) {
- return Cells.EMPTY;
- }
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- rv.add(new AbsCell[]{new TextCell("Type:"),new TextCell(pType)});
- rv.add(new AbsCell[]{new TextCell("Instance:"),new TextCell(pInstance)});
- rv.add(new AbsCell[]{new TextCell("Action:"),new TextCell(pAction)});
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client)throws CadiException, ConnectException, APIException {
- TimeTaken tt = trans.start("AAF Perm Details",Env.REMOTE);
- try {
- Future<Perms> fp= client.read("/authz/perms/"+pType + '/' + pInstance + '/' + pAction,gui.permsDF);
-
- if(fp.get(AuthGUI.TIMEOUT)) {
- tt.done();
- tt = trans.start("Load Data", Env.SUB);
- List<Perm> ps = fp.value.getPerm();
- if(!ps.isEmpty()) {
- Perm perm = fp.value.getPerm().get(0);
- String desc = (perm.getDescription()!=null?perm.getDescription():BLANK);
- rv.add(new AbsCell[]{new TextCell("Description:"),new TextCell(desc)});
- boolean first=true;
- for(String r : perm.getRoles()) {
- if(first){
- first=false;
- rv.add(new AbsCell[] {
- new TextCell("Associated Roles:"),
- new TextCell(r)
- });
- } else {
- rv.add(new AbsCell[] {
- AbsCell.Null,
- new TextCell(r)
- });
- }
- }
- }
- String historyLink = PermHistory.HREF
- + "?type=" + pType + "&instance=" + pInstance + "&action=" + pAction;
-
- rv.add(new AbsCell[] {new RefCell("See History",historyLink)});
- } else {
- rv.add(new AbsCell[] {new TextCell(
- fp.code()==HttpStatus.NOT_FOUND_404?
- "*** Implicit Permission ***":
- "*** Data Unavailable ***"
- )});
- }
- } finally {
- tt.done();
- }
-
- return null;
- }
- });
- } catch (Exception e) {
- e.printStackTrace();
- }
- return new Cells(rv,null);
- }
- }
-}
-
\ No newline at end of file
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.Pkey;
-import aaf.v2_0.RolePermRequest;
-
-public class PermGrantAction extends Page {
-
-
- public PermGrantAction(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,PermGrantForm.NAME, PermGrantForm.HREF, PermGrantForm.fields,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true,"content") {
- final Slot sType = gui.env.slot(PermGrantForm.NAME+'.'+PermGrantForm.fields[0]);
- final Slot sInstance = gui.env.slot(PermGrantForm.NAME+'.'+PermGrantForm.fields[1]);
- final Slot sAction = gui.env.slot(PermGrantForm.NAME+'.'+PermGrantForm.fields[2]);
- final Slot sRole = gui.env.slot(PermGrantForm.NAME+'.'+PermGrantForm.fields[3]);
-
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- cache.dynamic(hgen, new DynamicCode<HTMLGen,AuthGUI, AuthzTrans>() {
- @Override
- public void code(final AuthGUI gui, final AuthzTrans trans,Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
-
- String type = trans.get(sType,null);
- String instance = trans.get(sInstance,null);
- String action = trans.get(sAction,null);
- String role = trans.get(sRole,null);
-
- String lastPage = PermGrantForm.HREF
- + "?type=" + type + "&instance=" + instance + "&action=" + action;
-
- // Run Validations
- boolean fail = true;
-
- TimeTaken tt = trans.start("AAF Grant Permission to Role",Env.REMOTE);
- try {
-
- final RolePermRequest grantReq = new RolePermRequest();
- Pkey pkey = new Pkey();
- pkey.setType(type);
- pkey.setInstance(instance);
- pkey.setAction(action);
- grantReq.setPerm(pkey);
- grantReq.setRole(role);
-
- fail = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Boolean>() {
- @Override
- public Boolean code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- boolean fail = true;
- Future<RolePermRequest> fgrant = client.create(
- "/authz/role/perm",
- gui.rolePermReqDF,
- grantReq
- );
-
- if(fgrant.get(5000)) {
- hgen.p("Permission has been granted to role.");
- fail = false;
- } else {
- if (202==fgrant.code()) {
- hgen.p("Permission Grant Request sent, but must be Approved before actualizing");
- fail = false;
- } else {
- gui.writeError(trans, fgrant, hgen);
- }
- }
- return fail;
- }
- });
- } catch (Exception e) {
- hgen.p("Unknown Error");
- e.printStackTrace();
- } finally {
- tt.done();
- }
-
- hgen.br();
- hgen.incr("a",true,"href="+lastPage);
- if (fail) {
- hgen.text("Try again");
- } else {
- hgen.text("Grant this Permission to Another Role");
- }
- hgen.end();
- hgen.js()
- .text("alterLink('permgrant', '"+lastPage + "');")
- .done();
-
- }
- });
- }
- });
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import static com.att.xgen.html.HTMLGen.TABLE;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-import java.util.List;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.Role;
-import aaf.v2_0.Roles;
-
-public class PermGrantForm extends Page {
- static final String HREF = "/gui/permgrant";
- static final String NAME = "Permission Grant";
- static final String fields[] = {"type","instance","action","role"};
-
- public PermGrantForm(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME,HREF, fields,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true,"content") {
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen hgen) throws APIException, IOException {
- final Slot type = gui.env.slot(NAME+".type");
- final Slot instance = gui.env.slot(NAME+".instance");
- final Slot action = gui.env.slot(NAME+".action");
- final Slot role = gui.env.slot(NAME+".role");
- // p tags not closing right using .p() - causes issues in IE8 password form - so using leaf for the moment
- hgen.leaf("p").text("Choose a role to grant to this permission").end()
- .incr("form","method=post");
- Mark table = new Mark(TABLE);
- hgen.incr(table);
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
-
- Mark copyRoleJS = new Mark();
- hgen.js(copyRoleJS);
- hgen.text("function copyRole(role) {");
- hgen.text("var txtRole = document.querySelector(\"#role\");");
-// hgen.text("if (role==;");
- hgen.text("txtRole.value=role;");
- hgen.text("}");
- hgen.end(copyRoleJS);
-
- String typeValue = trans.get(type, "");
- String instanceValue = trans.get(instance, "");
- String actionValue = trans.get(action, "");
- String roleValue = trans.get(role,null);
- List<String> myRoles = getMyRoles(gui, trans);
- hgen
- .input(fields[0],"Perm Type",true,"value="+typeValue,"disabled")
- .input(fields[1],"Perm Instance",true,"value="+instanceValue,"disabled")
- .input(fields[2],"Perm Action",true,"value="+actionValue,"disabled");
-
- // select & options are not an input type, so we must create table row & cell tags
- Mark selectRow = new Mark();
- hgen
- .incr(selectRow, "tr")
- .incr("td")
- .incr("label", "for=myroles", "required").text("My Roles").end()
- .end()
- .incr("td")
- .incr("select", "name=myroles", "id=myroles", "onchange=copyRole(this.value)")
- .incr("option", "value=").text("Select one of my roles").end();
- for (String role : myRoles) {
- hgen.incr("option", "value="+role).text(role).end();
- }
- hgen
- .incr("option", "value=").text("Other").end()
- .end(selectRow);
- if(roleValue==null) {
- hgen.input(fields[3],"Role", true, "placeholder=or type a role here");
- } else {
- hgen.input(fields[3],"Role",true, "value="+roleValue);
- }
- hgen.end();
- }
- });
- hgen.end();
- hgen.tagOnly("input", "type=submit", "value=Submit")
- .end();
-
- }
- });
- }
-
- private static List<String> getMyRoles(final AuthGUI gui, final AuthzTrans trans) {
- List<String> myRoles = new ArrayList<String>();
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- TimeTaken tt = trans.start("AAF get my roles",Env.REMOTE);
- try {
- Future<Roles> fr = client.read("/authz/roles/user/"+trans.user(),gui.rolesDF);
- if(fr.get(5000)) {
- tt.done();
- tt = trans.start("Load Data", Env.SUB);
- if (fr.value != null) for (Role r : fr.value.getRole()) {
- myRoles.add(r.getName());
- }
- } else {
- gui.writeError(trans, fr, null);
- }
- } finally {
- tt.done();
- }
- return null;
- }
- });
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- return myRoles;
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Comparator;
-import java.util.List;
-
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.History;
-import aaf.v2_0.History.Item;
-
-
-public class PermHistory extends Page {
- static final String NAME="PermHistory";
- static final String HREF = "/gui/permHistory";
- static final String FIELDS[] = {"type","instance","action","dates"};
- static final String WEBPHONE = "http://webphone.att.com/cgi-bin/webphones.pl?id=";
- static enum Month { JANUARY, FEBRUARY, MARCH, APRIL, MAY, JUNE, JULY,
- AUGUST, SEPTEMBER, OCTOBER, NOVEMBER, DECEMBER };
-
- public PermHistory(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME,HREF, FIELDS,
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("History", gui.env.newTransNoAvg(),new Model(gui.env()),"class=std"),
- new NamedCode(true, "content") {
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen hgen) throws APIException, IOException {
- final Slot sType = gui.env.slot(NAME+".type");
- final Slot sInstance = gui.env.slot(NAME+".instance");
- final Slot sAction = gui.env.slot(NAME+".action");
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- String type = trans.get(sType, null);
- String instance = trans.get(sInstance,null);
- String action = trans.get(sAction,null);
-
- // Use Javascript to make the table title more descriptive
- hgen.js()
- .text("var caption = document.querySelector(\".title\");")
- .text("caption.innerHTML='History for Permission [ " + type + " ]';")
- .done();
-
- // Use Javascript to change Link Target to our last visited Detail page
- String lastPage = PermDetail.HREF + "?type=" + type
- + "&instance=" + instance
- + "&action=" + action;
- hgen.js()
- .text("alterLink('permdetail', '"+lastPage + "');")
- .done();
-
- hgen.br();
- hgen.leaf("a", "href=#advanced_search", "onclick=divVisibility('advanced_search');").text("Advanced Search").end()
- .divID("advanced_search", "style=display:none");
- hgen.incr("table");
-
- addDateRow(hgen,"Start Date");
- addDateRow(hgen,"End Date");
- hgen.incr("tr").incr("td");
- hgen.tagOnly("input", "type=button","value=Get History",
- "onclick=datesURL('"+HREF+"?type=" + type
- + "&instance=" + instance
- + "&action=" + action+"');");
- hgen.end().end();
- hgen.end();
- hgen.end();
- }
- });
- }
- }
-
- );
-
- }
-
- private static void addDateRow(HTMLGen hgen, String s) {
- hgen
- .incr("tr")
- .incr("td")
- .incr("label", "for=month", "required").text(s+"*").end()
- .end()
- .incr("td")
- .incr("select", "name=month"+s.substring(0, s.indexOf(' ')), "id=month"+s.substring(0, s.indexOf(' ')), "required")
- .incr("option", "value=").text("Month").end();
- for (Month m : Month.values()) {
- if (Calendar.getInstance().get(Calendar.MONTH) == m.ordinal()) {
- hgen.incr("option", "selected", "value="+(m.ordinal()+1)).text(m.name()).end();
- } else {
- hgen.incr("option", "value="+(m.ordinal()+1)).text(m.name()).end();
- }
- }
- hgen.end()
- .end()
- .incr("td")
- .tagOnly("input","type=number","id=year"+s.substring(0, s.indexOf(' ')),"required",
- "value="+Calendar.getInstance().get(Calendar.YEAR), "min=1900",
- "max="+Calendar.getInstance().get(Calendar.YEAR),
- "placeholder=Year").end()
- .end();
- }
-
- /**
- * Implement the Table Content for History
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String CSP_ATT_COM = "@csp.att.com";
- private static final String[] headers = new String[] {"Date","User","Memo"};
- private Slot sType;
- private Slot sDates;
-
- public Model(AuthzEnv env) {
- sType = env.slot(NAME+".type");
- sDates = env.slot(NAME+".dates");
- }
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- final String oName = trans.get(sType,null);
- final String oDates = trans.get(sDates,null);
-
- if(oName==null) {
- return Cells.EMPTY;
- }
-
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- String msg = null;
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- TimeTaken tt = trans.start("AAF Get History for Permission ["+oName+"]",Env.REMOTE);
- try {
- if (oDates != null) {
- client.setQueryParams("yyyymm="+oDates);
- }
- Future<History> fh = client.read(
- "/authz/hist/perm/"+oName,
- gui.historyDF
- );
-
-
- if (fh.get(AuthGUI.TIMEOUT)) {
- tt.done();
- tt = trans.start("Load History Data", Env.SUB);
- List<Item> histItems = fh.value.getItem();
-
- java.util.Collections.sort(histItems, new Comparator<Item>() {
- @Override
- public int compare(Item o1, Item o2) {
- return o2.getTimestamp().compare(o1.getTimestamp());
- }
- });
-
- for (Item i : histItems) {
- String user = i.getUser();
- AbsCell userCell = (user.endsWith(CSP_ATT_COM)?
- new RefCell(user,WEBPHONE + user.substring(0,user.indexOf('@'))):new TextCell(user));
-
- rv.add(new AbsCell[] {
- new TextCell(i.getTimestamp().toGregorianCalendar().getTime().toString()),
- userCell,
- new TextCell(i.getMemo())
- });
- }
-
- } else {
- if (fh.code()==403) {
- rv.add(new AbsCell[] {new TextCell("You may not view History of Permission [" + oName + "]", "colspan = 3", "class=center")});
- } else {
- rv.add(new AbsCell[] {new TextCell("*** Data Unavailable ***", "colspan = 3", "class=center")});
- }
- }
- } finally {
- tt.done();
- }
-
- return null;
- }
- });
-
- } catch (Exception e) {
- trans.error().log(e);
- }
- return new Cells(rv,msg);
- }
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.TimeTaken;
-
-import aaf.v2_0.Perm;
-import aaf.v2_0.Perms;
-
-/**
- * Page content for My Permissions
- *
- *
- */
-public class PermsShow extends Page {
- public static final String HREF = "/gui/myperms";
-
- public PermsShow(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, "MyPerms",HREF, NO_FIELDS,
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("Permissions",gui.env.newTransNoAvg(),new Model(), "class=std"));
- }
-
- /**
- * Implement the Table Content for Permissions by User
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String[] headers = new String[] {"Type","Instance","Action"};
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- TimeTaken tt = trans.start("AAF Perms by User",Env.REMOTE);
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- Future<Perms> fp = client.read("/authz/perms/user/"+trans.user(), gui.permsDF);
- if(fp.get(5000)) {
- TimeTaken ttld = trans.start("Load Data", Env.SUB);
- try {
- if(fp.value!=null) {
- for(Perm p : fp.value.getPerm()) {
- AbsCell[] sa = new AbsCell[] {
- new RefCell(p.getType(),PermDetail.HREF
- +"?type="+p.getType()
- +"&instance="+p.getInstance()
- +"&action="+p.getAction()),
- new TextCell(p.getInstance()),
- new TextCell(p.getAction())
- };
- rv.add(sa);
- }
- } else {
- gui.writeError(trans, fp, null);
- }
- } finally {
- ttld.done();
- }
- }
- return null;
- }
- });
- } catch (Exception e) {
- trans.error().log(e);
- } finally {
- tt.done();
- }
- return new Cells(rv,null);
- }
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.UUID;
-
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-
-import aaf.v2_0.Approval;
-import aaf.v2_0.Approvals;
-
-public class RequestDetail extends Page {
- public static final String HREF = "/gui/requestdetail";
- public static final String NAME = "RequestDetail";
- private static final String DATE_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss";
- public static final String[] FIELDS = {"ticket"};
-
- public RequestDetail(final AuthGUI gui, Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, NAME, HREF, FIELDS,
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("Request Details",gui.env.newTransNoAvg(),new Model(gui.env()),"class=detail")
- );
- }
-
- /**
- * Implement the table content for Request Detail
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- static final String WEBPHONE = "http://webphone.att.com/cgi-bin/webphones.pl?id=";
- private static final String CSP_ATT_COM = "@csp.att.com";
- final long NUM_100NS_INTERVALS_SINCE_UUID_EPOCH = 0x01b21dd213814000L;
- private static final String[] headers = new String[0];
- private Slot sTicket;
- public Model(AuthzEnv env) {
- sTicket = env.slot(NAME+".ticket");
- }
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- Cells rv=Cells.EMPTY;
- final String ticket = trans.get(sTicket, null);
- if(ticket!=null) {
- try {
- rv = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Cells>() {
- @Override
- public Cells code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- TimeTaken tt = trans.start("AAF Approval Details",Env.REMOTE);
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- try {
- Future<Approvals> fa = client.read(
- "/authz/approval/ticket/"+ticket,
- gui.approvalsDF
- );
-
- if(fa.get(AuthGUI.TIMEOUT)) {
- if (!trans.user().equals(fa.value.getApprovals().get(0).getUser())) {
- return Cells.EMPTY;
- }
- tt.done();
- tt = trans.start("Load Data", Env.SUB);
- boolean first = true;
- for ( Approval approval : fa.value.getApprovals()) {
- AbsCell[] approverLine = new AbsCell[4];
- // only print common elements once
- if (first) {
- DateFormat createdDF = new SimpleDateFormat(DATE_TIME_FORMAT);
- UUID id = UUID.fromString(approval.getId());
-
- rv.add(new AbsCell[]{new TextCell("Ticket ID:"),new TextCell(approval.getTicket(),"colspan=3")});
- rv.add(new AbsCell[]{new TextCell("Memo:"),new TextCell(approval.getMemo(),"colspan=3")});
- rv.add(new AbsCell[]{new TextCell("Requested On:"),
- new TextCell(createdDF.format((id.timestamp() - NUM_100NS_INTERVALS_SINCE_UUID_EPOCH)/10000),"colspan=3")
- });
- rv.add(new AbsCell[]{new TextCell("Operation:"),new TextCell(decodeOp(approval.getOperation()),"colspan=3")});
- String user = approval.getUser();
- if (user.endsWith(CSP_ATT_COM)) {
- rv.add(new AbsCell[]{new TextCell("User:"),
- new RefCell(user,WEBPHONE + user.substring(0, user.indexOf("@")),"colspan=3")});
- } else {
- rv.add(new AbsCell[]{new TextCell("User:"),new TextCell(user,"colspan=3")});
- }
-
- // headers for listing each approver
- rv.add(new AbsCell[]{new TextCell(" ","colspan=4","class=blank_line")});
- rv.add(new AbsCell[]{AbsCell.Null,
- new TextCell("Approver","class=bold"),
- new TextCell("Type","class=bold"),
- new TextCell("Status","class=bold")});
- approverLine[0] = new TextCell("Approvals:");
-
- first = false;
- } else {
- approverLine[0] = AbsCell.Null;
- }
-
- String approver = approval.getApprover();
- String approverShort = approver.substring(0,approver.indexOf('@'));
-
- if (approver.endsWith(CSP_ATT_COM)) {
- approverLine[1] = new RefCell(approver, WEBPHONE + approverShort);
- } else {
- approverLine[1] = new TextCell(approval.getApprover());
- }
-
- String type = approval.getType();
- if ("owner".equalsIgnoreCase(type)) {
- type = "resource owner";
- }
-
- approverLine[2] = new TextCell(type);
- approverLine[3] = new TextCell(approval.getStatus());
- rv.add(approverLine);
-
- }
- } else {
- rv.add(new AbsCell[] {new TextCell("*** Data Unavailable ***")});
- }
- } finally {
- tt.done();
- }
- return new Cells(rv,null);
- }
- });
- } catch (Exception e) {
- trans.error().log(e);
- }
- }
- return rv;
- }
-
- private String decodeOp(String operation) {
- if ("C".equalsIgnoreCase(operation)) {
- return "Create";
- } else if ("D".equalsIgnoreCase(operation)) {
- return "Delete";
- } else if ("U".equalsIgnoreCase(operation)) {
- return "Update";
- } else if ("G".equalsIgnoreCase(operation)) {
- return "Grant";
- } else if ("UG".equalsIgnoreCase(operation)) {
- return "Un-Grant";
- }
- return operation;
- }
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-
-import aaf.v2_0.Pkey;
-import aaf.v2_0.Role;
-import aaf.v2_0.Roles;
-
-/**
- * Detail Page for Permissions
- *
- *
- */
-public class RoleDetail extends Page {
- public static final String HREF = "/gui/roledetail";
- public static final String NAME = "RoleDetail";
- private static final String BLANK = "";
-
- public RoleDetail(final AuthGUI gui, Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, NAME, HREF, new String[] {"role"},
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("Role Details",gui.env.newTransNoAvg(),new Model(gui.env()),"class=detail")
- );
- }
-
- /**
- * Implement the table content for Permissions Detail
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String[] headers = new String[0];
- private Slot role;
- public Model(AuthzEnv env) {
- role = env.slot(NAME+".role");
- }
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- final String pRole = trans.get(role, null);
- Cells rv = Cells.EMPTY;
- if(pRole!=null) {
- try {
- rv = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Cells>() {
- @Override
- public Cells code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- rv.add(new AbsCell[]{new TextCell("Role:"),new TextCell(pRole)});
-
- TimeTaken tt = trans.start("AAF Role Details",Env.REMOTE);
- try {
-
- Future<Roles> fr = client.read("/authz/roles/"+pRole,gui.rolesDF);
- if(fr.get(AuthGUI.TIMEOUT)) {
- tt.done();
- tt = trans.start("Load Data", Env.SUB);
- Role role = fr.value.getRole().get(0);
- String desc = (role.getDescription()!=null?role.getDescription():BLANK);
- rv.add(new AbsCell[]{new TextCell("Description:"),new TextCell(desc)});
- boolean first=true;
- for(Pkey r : role.getPerms()) {
- if(first){
- first=false;
- rv.add(new AbsCell[] {
- new TextCell("Associated Permissions:"),
- new TextCell(r.getType() +
- " | " + r.getInstance() +
- " | " + r.getAction()
- )
- });
- } else {
- rv.add(new AbsCell[] {
- AbsCell.Null,
- new TextCell(r.getType() +
- " | " + r.getInstance() +
- " | " + r.getAction()
- )
- });
- }
- }
- String historyLink = RoleHistory.HREF
- + "?role=" + pRole;
- rv.add(new AbsCell[] {new RefCell("See History",historyLink)});
- } else {
- rv.add(new AbsCell[] {new TextCell("*** Data Unavailable ***")});
- }
- } finally {
- tt.done();
- }
- return new Cells(rv,null);
- }
- });
- } catch (Exception e) {
- trans.error().log(e);
- }
- }
- return rv;
- }
- }
-}
-
\ No newline at end of file
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Comparator;
-import java.util.List;
-
-import com.att.authz.env.AuthzEnv;
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-import aaf.v2_0.History;
-import aaf.v2_0.History.Item;
-
-
-public class RoleHistory extends Page {
- static final String NAME="RoleHistory";
- static final String HREF = "/gui/roleHistory";
- static final String FIELDS[] = {"role","dates"};
- static final String WEBPHONE = "http://webphone.att.com/cgi-bin/webphones.pl?id=";
- static enum Month { JANUARY, FEBRUARY, MARCH, APRIL, MAY, JUNE, JULY,
- AUGUST, SEPTEMBER, OCTOBER, NOVEMBER, DECEMBER };
-
- public RoleHistory(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME,HREF, FIELDS,
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("History", gui.env.newTransNoAvg(),new Model(gui.env()),"class=std"),
- new NamedCode(true, "content") {
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen hgen) throws APIException, IOException {
- final Slot role = gui.env.slot(NAME+".role");
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- String obRole = trans.get(role, null);
-
- // Use Javascript to make the table title more descriptive
- hgen.js()
- .text("var caption = document.querySelector(\".title\");")
- .text("caption.innerHTML='History for Role [ " + obRole + " ]';")
- .done();
-
- // Use Javascript to change Link Target to our last visited Detail page
- String lastPage = RoleDetail.HREF + "?role=" + obRole;
- hgen.js()
- .text("alterLink('roledetail', '"+lastPage + "');")
- .done();
-
- hgen.br();
- hgen.leaf("a", "href=#advanced_search","onclick=divVisibility('advanced_search');").text("Advanced Search").end()
- .divID("advanced_search", "style=display:none");
- hgen.incr("table");
-
- addDateRow(hgen,"Start Date");
- addDateRow(hgen,"End Date");
- hgen.incr("tr").incr("td");
- hgen.tagOnly("input", "type=button","value=Get History",
- "onclick=datesURL('"+HREF+"?role=" + obRole+"');");
- hgen.end().end();
- hgen.end();
- hgen.end();
- }
- });
- }
- }
-
- );
-
- }
-
- private static void addDateRow(HTMLGen hgen, String s) {
- hgen
- .incr("tr")
- .incr("td")
- .incr("label", "for=month", "required").text(s+"*").end()
- .end()
- .incr("td")
- .incr("select", "name=month"+s.substring(0, s.indexOf(' ')), "id=month"+s.substring(0, s.indexOf(' ')), "required")
- .incr("option", "value=").text("Month").end();
- for (Month m : Month.values()) {
- if (Calendar.getInstance().get(Calendar.MONTH) == m.ordinal()) {
- hgen.incr("option", "selected", "value="+(m.ordinal()+1)).text(m.name()).end();
- } else {
- hgen.incr("option", "value="+(m.ordinal()+1)).text(m.name()).end();
- }
- }
- hgen.end()
- .end()
- .incr("td")
- .tagOnly("input","type=number","id=year"+s.substring(0, s.indexOf(' ')),"required",
- "value="+Calendar.getInstance().get(Calendar.YEAR), "min=1900",
- "max="+Calendar.getInstance().get(Calendar.YEAR),
- "placeholder=Year").end()
- .end();
- }
-
-
- /**
- * Implement the Table Content for History
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String CSP_ATT_COM = "@csp.att.com";
- private static final String[] headers = new String[] {"Date","User","Memo"};
- private Slot role;
- private Slot dates;
-
- public Model(AuthzEnv env) {
- role = env.slot(NAME+".role");
- dates = env.slot(NAME+".dates");
- }
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- final String oName = trans.get(role,null);
- final String oDates = trans.get(dates,null);
-
- Cells rv = Cells.EMPTY;
- if(oName!=null) {
-
- try {
- rv = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Cells>() {
- @Override
- public Cells code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- TimeTaken tt = trans.start("AAF Get History for Namespace ["+oName+"]",Env.REMOTE);
- String msg = null;
- try {
- if (oDates != null) {
- client.setQueryParams("yyyymm="+oDates);
- }
- Future<History> fh = client.read("/authz/hist/role/"+oName,gui.historyDF);
- if (fh.get(AuthGUI.TIMEOUT)) {
- tt.done();
- tt = trans.start("Load History Data", Env.SUB);
- List<Item> histItems = fh.value.getItem();
-
- java.util.Collections.sort(histItems, new Comparator<Item>() {
- @Override
- public int compare(Item o1, Item o2) {
- return o2.getTimestamp().compare(o1.getTimestamp());
- }
- });
-
- for (Item i : histItems) {
- String user = i.getUser();
- AbsCell userCell = (user.endsWith(CSP_ATT_COM)?
- new RefCell(user,WEBPHONE + user.substring(0,user.indexOf('@'))):new TextCell(user));
-
- rv.add(new AbsCell[] {
- new TextCell(i.getTimestamp().toGregorianCalendar().getTime().toString()),
- userCell,
- new TextCell(i.getMemo())
- });
- }
- } else {
- if (fh.code()==403) {
- rv.add(new AbsCell[] {new TextCell("You may not view History of Permission [" + oName + "]", "colspan = 3", "class=center")});
- } else {
- rv.add(new AbsCell[] {new TextCell("*** Data Unavailable ***", "colspan = 3", "class=center")});
- }
- }
- } finally {
- tt.done();
- }
- return new Cells(rv,msg);
- }
- });
- } catch (Exception e) {
- trans.error().log(e);
- }
- }
- return rv;
- }
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.Page;
-import com.att.authz.gui.Table;
-import com.att.authz.gui.Table.Cells;
-import com.att.authz.gui.table.AbsCell;
-import com.att.authz.gui.table.RefCell;
-import com.att.authz.gui.table.TextCell;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.TimeTaken;
-import org.onap.aaf.inno.env.util.Chrono;
-
-import aaf.v2_0.UserRole;
-import aaf.v2_0.UserRoles;
-
-
-/**
- * Page content for My Roles
- *
- *
- */
-public class RolesShow extends Page {
- public static final String HREF = "/gui/myroles";
- private static final String DATE_TIME_FORMAT = "yyyy-MM-dd";
- private static SimpleDateFormat expiresDF;
-
- static {
- expiresDF = new SimpleDateFormat(DATE_TIME_FORMAT);
- }
-
- public RolesShow(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, "MyRoles",HREF, NO_FIELDS,
- new BreadCrumbs(breadcrumbs),
- new Table<AuthGUI,AuthzTrans>("Roles",gui.env.newTransNoAvg(),new Model(), "class=std"));
- }
-
- /**
- * Implement the Table Content for Permissions by User
- *
- *
- */
- private static class Model implements Table.Data<AuthGUI,AuthzTrans> {
- private static final String[] headers = new String[] {"Role","Expires","Remediation","Actions"};
-
- @Override
- public String[] headers() {
- return headers;
- }
-
- @Override
- public Cells get(final AuthGUI gui, final AuthzTrans trans) {
- Cells rv = Cells.EMPTY;
-
- try {
- rv = gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Cells>() {
- @Override
- public Cells code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- ArrayList<AbsCell[]> rv = new ArrayList<AbsCell[]>();
- TimeTaken tt = trans.start("AAF Roles by User",Env.REMOTE);
- try {
- Future<UserRoles> fur = client.read("/authz/userRoles/user/"+trans.user(),gui.userrolesDF);
- if (fur.get(5000)) {
- if(fur.value != null) for (UserRole u : fur.value.getUserRole()) {
- if(u.getExpires().compare(Chrono.timeStamp()) < 0) {
- AbsCell[] sa = new AbsCell[] {
- new TextCell(u.getRole() + "*", "class=expired"),
- new TextCell(expiresDF.format(u.getExpires().toGregorianCalendar().getTime()),"class=expired"),
- new RefCell("Extend",
- UserRoleExtend.HREF + "?user="+trans.user()+"&role="+u.getRole(),
- new String[]{"class=expired"}),
- new RefCell("Remove",
- UserRoleRemove.HREF + "?user="+trans.user()+"&role="+u.getRole(),
- new String[]{"class=expired"})
-
- };
- rv.add(sa);
- } else {
- AbsCell[] sa = new AbsCell[] {
- new RefCell(u.getRole(),
- RoleDetail.HREF+"?role="+u.getRole()),
- new TextCell(expiresDF.format(u.getExpires().toGregorianCalendar().getTime())),
- AbsCell.Null,
- new RefCell("Remove",
- UserRoleRemove.HREF + "?user="+trans.user()+"&role="+u.getRole())
- };
- rv.add(sa);
- }
- }
- }
-
- } finally {
- tt.done();
- }
- return new Cells(rv,null);
- }
- });
- } catch (Exception e) {
- trans.error().log(e);
- }
- return rv;
- }
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-public class UserRoleExtend extends Page {
- public static final String HREF = "/gui/urExtend";
- static final String NAME = "Extend User Role";
- static final String fields[] = {"user","role"};
-
- public UserRoleExtend(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME, HREF, fields,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true, "content") {
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen hgen) throws APIException, IOException {
- final Slot sUser = gui.env.slot(NAME+".user");
- final Slot sRole = gui.env.slot(NAME+".role");
-
-
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- final String user = trans.get(sUser, "");
- final String role = trans.get(sRole, "");
-
- TimeTaken tt = trans.start("Request to extend user role",Env.REMOTE);
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client)throws CadiException, ConnectException, APIException {
- Future<Void> fv = client.setQueryParams("request=true").update("/authz/userRole/extend/"+user+"/"+role);
- if(fv.get(5000)) {
- // not sure if we'll ever hit this
- hgen.p("Extended User ["+ user+"] in Role [" +role+"]");
- } else {
- if (fv.code() == 202 ) {
- hgen.p("User ["+ user+"] in Role [" +role+"] Extension sent for Approval");
- } else {
- gui.writeError(trans, fv, hgen);
- }
- }
- return null;
- }
- });
- } catch (Exception e) {
- trans.error().log(e);
- e.printStackTrace();
- } finally {
- tt.done();
- }
-
-
- }
- });
- }
-
- });
- }
-}
-
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-import java.net.ConnectException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.cadi.CadiException;
-import org.onap.aaf.cadi.client.Future;
-import org.onap.aaf.cadi.client.Rcli;
-import org.onap.aaf.cadi.client.Retryable;
-import org.onap.aaf.inno.env.APIException;
-import org.onap.aaf.inno.env.Env;
-import org.onap.aaf.inno.env.Slot;
-import org.onap.aaf.inno.env.TimeTaken;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.html.HTMLGen;
-
-public class UserRoleRemove extends Page {
- public static final String HREF = "/gui/urRemove";
- static final String NAME = "Remove User Role";
- static final String fields[] = {"user","role"};
-
- public UserRoleRemove(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env,NAME, HREF, fields,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true, "content") {
- @Override
- public void code(final Cache<HTMLGen> cache, final HTMLGen hgen) throws APIException, IOException {
- final Slot sUser = gui.env.slot(NAME+".user");
- final Slot sRole = gui.env.slot(NAME+".role");
-
-
- cache.dynamic(hgen, new DynamicCode<HTMLGen, AuthGUI, AuthzTrans>() {
- @Override
- public void code(AuthGUI gui, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- final String user = trans.get(sUser, "");
- final String role = trans.get(sRole, "");
-
- TimeTaken tt = trans.start("Request a user role delete",Env.REMOTE);
- try {
- gui.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {
- @Override
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {
- Future<Void> fv = client.setQueryParams("request=true").delete(
- "/authz/userRole/"+user+"/"+role,Void.class);
-
- if(fv.get(5000)) {
- // not sure if we'll ever hit this
- hgen.p("User ["+ user+"] Removed from Role [" +role+"]");
- } else {
- if (fv.code() == 202 ) {
- hgen.p("User ["+ user+"] Removal from Role [" +role+"] sent for Approval");
- } else {
- gui.writeError(trans, fv, hgen);
- }
- }
- return null;
- }
- });
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- tt.done();
- }
- }
- });
- }
-
- });
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.pages;
-
-import java.io.IOException;
-
-import com.att.authz.env.AuthzTrans;
-import com.att.authz.gui.AuthGUI;
-import com.att.authz.gui.BreadCrumbs;
-import com.att.authz.gui.NamedCode;
-import com.att.authz.gui.Page;
-import org.onap.aaf.inno.env.APIException;
-import com.att.xgen.Cache;
-import com.att.xgen.DynamicCode;
-import com.att.xgen.Mark;
-import com.att.xgen.html.HTMLGen;
-
-public class WebCommand extends Page {
- public static final String HREF = "/gui/cui";
-
- public WebCommand(final AuthGUI gui, final Page ... breadcrumbs) throws APIException, IOException {
- super(gui.env, "Web Command Client",HREF, NO_FIELDS,
- new BreadCrumbs(breadcrumbs),
- new NamedCode(true, "content") {
- @Override
- public void code(Cache<HTMLGen> cache, HTMLGen hgen) throws APIException, IOException {
- hgen.leaf("p","id=help_msg")
- .text("Questions about this page? ")
- .leaf("a", "href=http://wiki.web.att.com/display/aaf/Web+CUI+Usage", "target=_blank")
- .text("Click here")
- .end()
- .text(". Type 'help' below for a list of AAF commands")
- .end()
-
- .divID("console_and_options");
- hgen.divID("console_area");
- hgen.end(); //console_area
-
- hgen.divID("options_link", "class=closed");
- hgen.img("src=../../theme/options_down.png", "onclick=handleDivHiding('options',this);",
- "id=options_img", "alt=Options", "title=Options")
- .end(); //options_link
-
- hgen.divID("options");
- cache.dynamic(hgen, new DynamicCode<HTMLGen,AuthGUI,AuthzTrans>() {
- @Override
- public void code(AuthGUI state, AuthzTrans trans, Cache<HTMLGen> cache, HTMLGen xgen)
- throws APIException, IOException {
- switch(browser(trans,trans.env().slot(getBrowserType()))) {
- case ie:
- case ieOld:
- // IE doesn't support file save
- break;
- default:
- xgen.img("src=../../theme/AAFdownload.png", "onclick=saveToFile();",
- "alt=Save log to file", "title=Save log to file");
- }
-// xgen.img("src=../../theme/AAFemail.png", "onclick=emailLog();",
-// "alt=Email log to me", "title=Email log to me");
- xgen.img("src=../../theme/AAF_font_size.png", "onclick=handleDivHiding('text_slider',this);",
- "id=fontsize_img", "alt=Change text size", "title=Change text size");
- xgen.img("src=../../theme/AAF_details.png", "onclick=selectOption(this,0);",
- "id=details_img", "alt=Turn on/off details mode", "title=Turn on/off details mode");
- xgen.img("src=../../theme/AAF_maximize.png", "onclick=maximizeConsole(this);",
- "id=maximize_img", "alt=Maximize Console Window", "title=Maximize Console Window");
- }
- });
-
- hgen.divID("text_slider");
- hgen.tagOnly("input", "type=button", "class=change_font", "onclick=buttonChangeFontSize('dec')", "value=-")
- .tagOnly("input", "id=text_size_slider", "type=range", "min=75", "max=200", "value=100",
- "oninput=changeFontSize(this.value)", "onchange=changeFontSize(this.value)", "title=Change Text Size")
- .tagOnly("input", "type=button", "class=change_font", "onclick=buttonChangeFontSize('inc')", "value=+")
- .end(); //text_slider
-
- hgen.end(); //options
- hgen.end(); //console_and_options
-
- hgen.divID("input_area");
- hgen.tagOnly("input", "type=text", "id=command_field",
- "autocomplete=off", "autocorrect=off", "autocapitalize=off", "spellcheck=false",
- "onkeypress=keyPressed()", "placeholder=Type your AAFCLI commands here", "autofocus")
- .tagOnly("input", "id=submit", "type=button", "value=Submit",
- "onclick=http('put','../../gui/cui',getCommand(),callCUI);")
- .end();
-
- Mark callCUI = new Mark();
- hgen.js(callCUI);
- hgen.text("function callCUI(resp) {")
- .text("moveCommandToDiv();")
- .text("printResponse(resp);")
- .text("}");
- hgen.end(callCUI);
-
- }
- });
-
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.table;
-
-import com.att.xgen.html.HTMLGen;
-
-public abstract class AbsCell {
- private static final String[] NONE = new String[0];
- protected static final String[] CENTER = new String[]{"class=center"};
-
- /**
- * Write Cell Data with HTMLGen generator
- * @param hgen
- */
- public abstract void write(HTMLGen hgen);
-
- public final static AbsCell Null = new AbsCell() {
- @Override
- public void write(final HTMLGen hgen) {
- }
- };
-
- public String[] attrs() {
- return NONE;
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.table;
-
-import com.att.xgen.html.HTMLGen;
-
-public class ButtonCell extends AbsCell {
- private String[] attrs;
-
- public ButtonCell(String value, String ... attributes) {
- attrs = new String[2+attributes.length];
- attrs[0]="type=button";
- attrs[1]="value="+value;
- System.arraycopy(attributes, 0, attrs, 2, attributes.length);
- }
- @Override
- public void write(HTMLGen hgen) {
- hgen.incr("input",true,attrs).end();
-
- }
-
- @Override
- public String[] attrs() {
- return AbsCell.CENTER;
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.table;
-
-import com.att.xgen.html.HTMLGen;
-
-public class RadioCell extends AbsCell {
- private String[] attrs;
-
- public RadioCell(String name, String radioClass, String value, String ... attributes) {
- attrs = new String[4+attributes.length];
- attrs[0]="type=radio";
- attrs[1]="name="+name;
- attrs[2]="class="+radioClass;
- attrs[3]="value="+value;
- System.arraycopy(attributes, 0, attrs, 4, attributes.length);
- }
-
- @Override
- public void write(HTMLGen hgen) {
- hgen.incr("input",true,attrs).end();
- }
-
- @Override
- public String[] attrs() {
- return AbsCell.CENTER;
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.table;
-
-import static com.att.xgen.html.HTMLGen.A;
-
-import com.att.xgen.html.HTMLGen;
-
-/**
- * Write a Reference Link into a Cell
- *
- */
-public class RefCell extends AbsCell {
- public final String name;
- public final String href;
- private String[] attrs;
-
- public RefCell(String name, String href, String... attributes) {
- attrs = new String[attributes.length];
- System.arraycopy(attributes, 0, attrs, 0, attributes.length);
- this.name = name;
- this.href = href;
- }
-
- @Override
- public void write(HTMLGen hgen) {
- hgen.leaf(A,"href="+href).text(name);
- }
-
- @Override
- public String[] attrs() {
- return attrs;
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.table;
-
-import static com.att.xgen.html.HTMLGen.A;
-
-import com.att.xgen.html.HTMLGen;
-
-public class TextAndRefCell extends RefCell {
-
- private String text;
-
- public TextAndRefCell(String text, String name, String href, String[] attributes) {
- super(name, href, attributes);
- this.text = text;
- }
-
- @Override
- public void write(HTMLGen hgen) {
- hgen.text(text);
- hgen.leaf(A,"href="+href).text(name);
- }
-
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-package com.att.authz.gui.table;
-
-import com.att.xgen.html.HTMLGen;
-
-/**
- * Write Simple Text into a Cell
- *
- */
-public class TextCell extends AbsCell {
- public final String name;
- private String[] attrs;
-
- public TextCell(String name, String... attributes) {
- attrs = new String[attributes.length];
- System.arraycopy(attributes, 0, attrs, 0, attributes.length);
- this.name = name;
- }
-
- @Override
- public void write(HTMLGen hgen) {
- hgen.text(name);
- }
-
- @Override
- public String[] attrs() {
- return attrs;
- }
-}
+++ /dev/null
-/*
- Standard CSS for AAF
-*/
-
-html {
- height: 100%;
-}
-
-body {
- background-image:url('t_bubbles.jpg');
- background-color: #FFFFFF;
- background-repeat:no-repeat;
- background-position: right top;
- background-size:15em 4.3em;
- color:#606060;
- font-family: Verdana,Arial,Helvetica,sans-serif;
- overflow: scroll;
- }
-
-header h1,p {
- margin: 4px auto;
-}
-
-header h1 {
- display: inline;
-}
-
-header {
- display: block;
- color: #F13099;
-}
-
-p#version {
- margin:0;
- display:inline;
- font-size: 0.75em;
- float:right;
- color: orange;
- padding-right:4.2em;
-}
-
-header hr {
- margin: 0;
-}
-
-hr {
- border: 1px solid #C0C0C0;
-}
-
-#breadcrumbs {
- padding: 5px 0 12px 0;
-}
-
-
-#breadcrumbs ul {
- color: #DFEFFC;
- margin: 0;
- list-style-type:none;
- padding: 0;
-}
-
-#breadcrumbs li {
- border-width:2px;
- margin: 3px 1px;
- padding: 2px 9px;
- border-style:solid;
- border-top-left-radius: .8em;
- border-bottom-left-radius: .8em;
- background-color:#80C337;
- display:inline;
-}
-
-#breadcrumbs a {
- text-decoration:none;
- color: white;
-}
-
-caption {
- color:#FF7241;
- text-align: center;
- font-size:1.3em;
- font-family: "Lucida Sans Unicode", "Lucida Grande", sans-serif;
-}
-
-#Pages {
- padding: 3px 2px 10px 4px;
- background: linear-gradient(to right, #147AB3,#FFFFFF);
-}
-
-#Pages h3,
-#Pages h4,
-h5{
- color: #909090;
-}
-
-form {
- padding: 10px;
- margin: 4px;
-}
-
-
-form input[id],select#myroles {
- margin: 4px 0;
- width: 150%;
-}
-
-form label {
- margin: 4px 0;
-}
-
-form label[required] {
- color: red;
-}
-
-form input[type=submit], form input[type=reset] {
- font-size: 1.0em;
- margin: 12px 0 0px 0;
- color: #F13099;
-}
-
-p.preamble, p.notfound,.expedite_request {
- display: block;
- margin: 30px 0px 10px 0px;
- font: italic bold 20px/30px Georgia, serif;
- font-size: 110%;
- color: #0079B8;
-}
-.expedite_request {
- margin-top: 0;
- color: #FF7241;
-}
-
-.subtext {
- margin-left: 10px;
- font-size: 75%;
- font-style: italic;
-}
-
-#Pages a {
- display:block;
- font-weight:bold;
- color:#FFFFFF;
- background-color:#80C337;
- text-decoration:none;
- border-top-right-radius: .8em;
- border-bottom-right-radius: .8em;
- border-top-left-radius: .2em;
- border-bottom-left-radius: .2em;
- padding: 3px 40px 3px 10px;
- margin: 4px;
- width: 50%;
-}
-
-#footer {
- background-color: #FF7200;
- color: #FFFFFF;
- text-align:right;
- font-size: 60%;
- padding: 5px;
- position:fixed;
- bottom: 0px;
- left: 0px;
- right: 0px;
-}
-
-/*
- Standard Table, with Alternating Colors
-*/
-div.std {
- vertical-align: top;
-}
-
-div.std table, div.stdform table {
- position: relative;
- border-collapse:collapse;
- table-layout:auto;
- left: 1.3%;
- width: 98%;
- margin-top: 5px;
- bottom: 4px;
- border-radius: 4px;
-}
-
-div.std td, div.stdform td {
- font-size:.9em;
-}
-
-.center {
- text-align: center;
-}
-
-.right {
- text-align: right;
- padding-right: 4px;
-}
-
-p.double {
- line-height: 2em;
-}
-
-p.api_comment {
- font-size: .9em;
- text-indent: 6px;
-}
-
-p.api_contentType {
- font-size: .8em;
- text-indent: 6px;
-}
-
-p.api_label {
- font-size: .9em;
- font-style: italic;
-}
-
-div.std h1, div.std h2, div.std h3, div.std h4, div.std h5 {
- text-indent: 7px;
-}
-
-div.std td {
- border:1px solid #A6C9E2;
-}
-
-div.std th, div.stdform th {
- background-color:#6FA7D1;
- color:#FFFFFF;
- }
-
-div.std tr.alt, div.stdform tr.alt {
- background-color:#DFEFFC;
-}
-
-div.std a, div.stdform a {
- /*color: #606060;*/
- color: #147AB3;
-}
-
-td.head {
- font-weight:bold;
- text-align: center;
-}
-
-td.head a {
- color:blue;
-}
-
-/*
- A Table representing 1 or more columns of text, i.e. Detail lists
-*/
-div.detail table {
- width: 100%;
-}
-
-div.detail caption {
- border-bottom: solid 1px #C0C0C0;
-}
-
-/*
- Approval Form select all
-
-*/
-.selectAllButton {
- background: transparent;
- border:none;
- color:blue;
- text-decoration:underline;
- font-weight:bold;
- cursor:pointer;
-}
-
-
-/*
- Begin Web Command Styling
-*/
-#console_and_options {
- position:relative;
-}
-
-.maximized {
- position:absolute;
- top:0px;
- bottom:50px;
- left:0px;
- right:0px;
- z-index:1000;
- background-color:white;
-}
-
-#console_area {
- -webkit-border-radius: 15px;
- -moz-border-radius: 15px;
- border-radius: 15px;
- background-color: black;
- color: white;
- font-family: "Lucida Console", Monaco, monospace;
- overflow-y: scroll;
- height: 300px;
- min-width: 600px;
- padding: 5px;
- resize: vertical;
-}
-
-.command,.bold {
- font-weight: bold;
-}
-
-.command:before {
- content: "> ";
-}
-
-.response{
- font-style: italic;
- font-size: 150%;
-}
-
-#input_area {
- margin-top: 10px;
- clear: both;
-}
-
-#command_field, #submit {
- font-size: 125%;
- background-color: #333333;
- color: white;
- font-family: "Lucida Console", Monaco, monospace;
- -webkit-border-radius: 1em;
- -moz-border-radius: 1em;
- border-radius: 1em;
-}
-
-#command_field {
- width: 75%;
- padding-left: 1em;
-}
-
-#submit {
- background-color: #80C337;
- padding: 0 5%;
- float: right;
-}
-
-/*
- Options Menu Styling for Web Command
-*/
-#options_link {
- -webkit-border-radius: 0 0 20% 20%;
- -moz-border-radius: 0 0 20% 20%;
- border-radius: 0 0 20% 20%;
- -webkit-transition: opacity 0.5s ease-in-out;
- -moz-transition: opacity 0.5s ease-in-out;
- -ms-transition: opacity 0.5s ease-in-out;
- -o-transition: opacity 0.5s ease-in-out;
- transition: opacity 0.5s ease-in-out;
-}
-
-.closed {
- opacity: 0.5;
- filter: alpha(opacity=50);
-}
-
-#options_link:hover, .open {
- opacity: 1.0;
- filter: alpha(opacity=100);
-}
-
-#options_link, #options {
- background: white;
- position:absolute;
- top:0;
- right:2em;
- padding:0.1em;
-}
-
-#options > img {
- cursor: pointer;
- float: right;
- padding: 0.2em;
-}
-
-.selected {
- border: 3px solid orange;
-}
-
-#options, #text_slider {
- display:none;
- padding:0.5em;
- -webkit-border-radius: 0 0 0 10px;
- -moz-border-radius: 0 0 0 10px;
- border-radius: 0 0 0 10px;
-}
-#text_slider {
- clear:both;
-}
-
-/*
- Button styling for changing text size
-*/
-.change_font {
- border-top: 1px solid #96d1f8;
- background: #65a9d7;
- background: -webkit-gradient(linear, left top, left bottom, from(#3e779d), to(#65a9d7));
- background: -webkit-linear-gradient(top, #3e779d, #65a9d7);
- background: -moz-linear-gradient(top, #3e779d, #65a9d7);
- background: -ms-linear-gradient(top, #3e779d, #65a9d7);
- background: -o-linear-gradient(top, #3e779d, #65a9d7);
- padding: 0 2px;
- -webkit-border-radius: 50%;
- -moz-border-radius: 50%;
- border-radius: 50%;
- -webkit-box-shadow: rgba(0,0,0,1) 0 1px 0;
- -moz-box-shadow: rgba(0,0,0,1) 0 1px 0;
- box-shadow: rgba(0,0,0,1) 0 1px 0;
- text-shadow: rgba(0,0,0,.4) 0 1px 0;
- color: white;
- font-size: 14px;
- font-family: monospace;
- text-decoration: none;
- vertical-align: middle;
-}
-.change_font:hover {
- border-top-color: #28597a;
- background: #28597a;
- color: #ccc;
-}
-
-/*
- Text Size Slider styling
-*/
-
-input[type=range] {
- -webkit-appearance: none;
- width: 60%;
- margin: 0;
-}
-input[type=range]:focus {
- outline: none;
-}
-input[type=range]::-webkit-slider-runnable-track {
- width: 100%;
- height: 4px;
- cursor: pointer;
- box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
- background: #3071a9;
- border-radius: 0.6px;
- border: 0.5px solid #010101;
-}
-input[type=range]::-webkit-slider-thumb {
- box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
- border: 1px solid #000000;
- height: 16px;
- width: 16px;
- border-radius: 30px;
- background: #efffff;
- cursor: pointer;
- -webkit-appearance: none;
- margin-top: -7.15px;
-}
-input[type=range]:focus::-webkit-slider-runnable-track {
- background: #367ebd;
-}
-input[type=range]::-moz-range-track {
- width: 100%;
- height: 2.7px;
- cursor: pointer;
- box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
- background: #3071a9;
- border-radius: 0.6px;
- border: 0.5px solid #010101;
-}
-input[type=range]::-moz-range-thumb {
- box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
- border: 1px solid #000000;
- height: 16px;
- width: 16px;
- border-radius: 30px;
- background: #efffff;
- cursor: pointer;
-}
-input[type=range]::-ms-track {
- width: 100%;
- height: 2.7px;
- cursor: pointer;
- background: transparent;
- border-color: transparent;
- color: transparent;
-}
-input[type=range]::-ms-fill-lower {
- background: #2a6495;
- border: 0.5px solid #010101;
- border-radius: 1.2px;
- box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
-}
-input[type=range]::-ms-fill-upper {
- background: #3071a9;
- border: 0.5px solid #010101;
- border-radius: 1.2px;
- box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
-}
-input[type=range]::-ms-thumb {
- box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
- border: 1px solid #000000;
- height: 16px;
- width: 16px;
- border-radius: 30px;
- background: #efffff;
- cursor: pointer;
- height: 2.7px;
-}
-input[type=range]:focus::-ms-fill-lower {
- background: #3071a9;
-}
-input[type=range]:focus::-ms-fill-upper {
- background: #367ebd;
-}
-.expired {
- color: red;
- background-color: pink;
-}
-.blank_line {
- padding: 10px;
-}
-#filterByUser input {
- display: inline;
-}
+++ /dev/null
-/*
- Modifications for Desktop
-*/
-body {
- background-size:23em 4.7em;
-}
-
-
-#breadcrumbs a:visited, #breadcrumbs a:link {
- transition: padding .5s;
-}
-
-#breadcrumbs a:hover {
- padding: 2px 2px 2px 30px;
- transition: padding .5s;
-}
-
-#breadcrumbs, #inner {
- margin: 3px;
- width: 77%;
- float: left;
- min-width:500px;
- background-color: #FFFFFF;
-
-}
-
-#breadcrumbs li {
- box-shadow: 3px 3px 2px #888888;
-}
-
-#Pages {
- margin: 20px;
- filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#147AB3', endColorstr='#ffffff',GradientType=1 ); /*linear gradient for IE 6-9*/
-}
-
-#Pages a:visited, #Pages a:link {
- padding: 3px 40px 3px 10px;
- transition: padding .5s;
- margin: 6px;
- box-shadow: 3px 3px 2px #888888;
-}
-
-#Pages a:hover {
- padding: 4px 80px 4px 15px;
- transition: box-shadow padding .5s;
- box-shadow: 4px 4px 3px #888888;
-}
-
-
-#inner {
- padding: 7px;
- background: #FFFFFF;
- overflow: hidden;
-}
-
-div.std, form {
- border: solid 2px #D0D0D0;
- border-radius: 5px;
- box-shadow: 10px 10px 5px #888888;
-}
-
-div.detail {
- border: solid 2px #C0C0C0;
- border-radius: 14px;
- box-shadow: 10px 10px 5px #888888;
-}
-
-#nav {
- display: inline-block;
- position: absolute;
- right: 2%;
- left: 81%;
-}
-
-#nav h2 {
- color: #FF7200;
- font-size: 1.2em;
- font-family: Verdana,Arial,Helvetica,sans-serif;
- font-style: italic;
- font-weight: normal;
-
-}
-
-#nav ul {
- font-style:italic;
- font-size: .8em;
- font-family: "Lucida Sans Unicode", "Lucida Grande", sans-serif;
- color: #067ab4;
- list-style-type: square;
- margin: 0;
- padding: 0;
-}
+++ /dev/null
-/*
- Modifications for iPhone
-*/
-body {
- zoom: 210%;
-}
-
-#breadcrumbs {
- font-size: .9em;
-}
-
-
-div.std table {
- margin: 0 0 20px 0;
- zoom: 130%
-}
-
-
-div.stdform th {
- font-size: 9px;
-}
-
-#content input {
- font-size: 1.3em;
-}
-
-
-#Pages a {
- font-size: 1.3em;
- width: 75%;
- height:35px;
-}
-
-#nav {
- display: none;
-}
-
-
+++ /dev/null
-/*
- Modifications for non-html5 IE
-*/
-body {
- background-size:23em 4.7em;
-}
-
-
-body h1 {
- margin: 4px auto;
- color: #F13099;
-}
-
-#footer {
- background-color: #FF7200;
- color: #FFFFFF;
- text-align:right;
- font-size: 60%;
- padding: 5px;
- position:fixed;
- bottom: 0px;
- left: 0px;
- right: 0px;
-}
-
-#breadcrumbs a:visited, #breadcrumbs a:link {
- transition: padding .5s;
-}
-
-#breadcrumbs a:hover {
- padding: 2px 2px 2px 30px;
- transition: padding .5s;
-}
-
-#breadcrumbs, #content {
- margin: 3px;
-}
-
-#breadcrumbs, #inner {
- margin: 3px;
- width: 77%;
- float: left;
- min-width:500px;
- background-color: #FFFFFF;
-}
-
-
-#breadcrumbs li {
- box-shadow: 3px 3px 2px #888888;
-}
-
-#inner {
- padding: 10px;
- overflow: hidden;
-}
-
-#inner form {
- border: solid 2px #D0D0D0;
-}
-
-#inner form input[id] {
- margin: 4px 0;
-}
-
-#inner form label {
- margin: 4px 0;
-}
-
-#inner form label[required] {
- color: red;
-}
-
-#inner form input[type=submit] {
- font-size: 1.0em;
- margin: 12px 0 0px 0;
- color: #F13099;
-}
-
-p.preamble, p.notfound {
- display: block;
- margin: 30px 0px 10px 0px;
- font: italic bold 20px/30px Georgia, serif;
- font-size: 110%;
- color: #0079B8;
-}
-
-
-#Pages {
- margin: 20px;
- filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#147AB3', endColorstr='#ffffff',GradientType=1 ); /*linear gradient for IE 6-9*/
-}
-
-#Pages a:visited, #Pages a:link {
- display: block;
- padding: 3px 40px 3px 10px;
- transition: padding .5s;
- margin: 6px;
- box-shadow: 3px 3px 2px #888888;
- background-color: #98bf21;
- text-decoration: none;
- color: white;
- font-weight: bold;
-}
-
-#Pages a:hover {
- padding: 4px 80px 4px 20px;
- transition: box-shadow padding 1s;
- box-shadow: 4px 4px 3px #888888;
-}
-
-tr {
- font-size: .9em;
-}
-
-tr.alt {
- background-color: #EEF0F0;
-}
-
-#nav {
-
- display: block;
- position: absolute;
- top: 175px;
- right: 2%;
- left: 81%;
- z-index=1;
- clear: both;
-}
-
-
-#nav h2 {
- color: #FF7200;
- font-size: 1.2em;
- font-family: Verdana,Arial,Helvetica,sans-serif;
- font-style: italic;
- font-weight: normal;
-
-}
-
-#nav ul {
- font-style:italic;
- font-size: .8em;
- font-family: "Lucida Sans Unicode", "Lucida Grande", sans-serif;
- color: #067ab4;
- list-style-type: square;
- margin: 0;
- padding: 0;
-}
-
-div.std {
- border: solid 2px #D0D0D0;
- border-radius: 5px;
- box-shadow: 10px 10px 5px #888888;
-}
-
-
-div.detail {
- border: solid 2px #C0C0C0;
- border-radius: 14px;
- box-shadow: 10px 10px 5px #888888;
-}
-
+++ /dev/null
-<!-- Used by AAF (ATT inc 2013) -->
-<xs:schema xmlns:aaf="urn:aaf:v1_0" xmlns:xs="http://www.w3.org/2001/XMLSchema" targetNamespace="urn:aaf:v1_0" elementFormDefault="qualified">
- <xs:element name="error">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="response_data" type="xs:string"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- <xs:element name="bool">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="value" type="xs:boolean"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- <xs:complexType name="permkey">
- <xs:sequence>
- <xs:element name="name" type="xs:string"/>
- <xs:element name="type" type="xs:string"/>
- <xs:element name="action" type="xs:string"/>
- </xs:sequence>
- </xs:complexType>
- <xs:element name="permkeys">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="keys" type="aaf:permkey" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- <xs:complexType name="user">
- <xs:sequence>
- <xs:element name="userName" type="xs:string"/>
- <xs:element name="roleName" type="xs:string"/>
- <xs:element name="userType" type="xs:string"/>
- <xs:element name="createUser" type="xs:string"/>
- <xs:element name="createTimestamp" type="xs:string"/>
- <xs:element name="modifyUser" type="xs:string"/>
- <xs:element name="modifyTimestamp" type="xs:string"/>
- <xs:element ref="aaf:roles" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- <xs:complexType name="role">
- <xs:sequence>
- <xs:element name="userName" type="xs:string"/>
- <xs:element name="roleName" type="xs:string"/>
- <xs:element name="userType" type="xs:string"/>
- <xs:element name="createUser" type="xs:string"/>
- <xs:element name="createTimestamp" type="xs:string"/>
- <xs:element name="modifyUser" type="xs:string"/>
- <xs:element name="modifyTimestamp" type="xs:string"/>
- <xs:element ref="aaf:permissions" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- <xs:element name="roles">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="roles" type="aaf:role" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- <xs:complexType name="permission">
- <xs:complexContent>
- <xs:extension base="aaf:permkey">
- <xs:sequence>
- <xs:element name="grantedRole" type="xs:string"/>
- <xs:element name="createUser" type="xs:string"/>
- <xs:element name="createTimestamp" type="xs:string"/>
- <xs:element name="modifyUser" type="xs:string"/>
- <xs:element name="modifyTimestamp" type="xs:string"/>
- <xs:element name="grantingRole" type="xs:string"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- <xs:element name="permissions">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="permissions" type="aaf:permission" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- <xs:complexType name="delg">
- <xs:sequence>
- <xs:element name="user" type="xs:string"/>
- <xs:element name="delegate" type="xs:string"/>
- <xs:element name="start" type="xs:date"/>
- <xs:element name="end" type="xs:date"/>
- </xs:sequence>
- </xs:complexType>
- <xs:element name="delgs">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="delgs" type="aaf:delg" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="cred">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="id" type="xs:string"/>
- <xs:choice >
- <xs:element name="password" type="xs:string" />
- <xs:element name="cert" type = "xs:hexBinary" />
- </xs:choice>
- <xs:element name="start" type="xs:date" />
- <xs:element name="end" type="xs:date" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <!--
- Approvals
- -->
- <xs:complexType name="approval">
- <xs:sequence>
- <xs:element name="user" type="xs:string"/>
- <xs:element name="role" type="xs:string"/>
- <xs:element name="status">
- <xs:simpleType>
- <xs:restriction base="xs:string">
- <xs:enumeration value="approve"/>
- <xs:enumeration value="reject"/>
- </xs:restriction>
- </xs:simpleType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- <xs:element name="approvals">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="approvals" type="aaf:approval" minOccurs="1" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <!--
- Users
- -->
- <xs:element name="users">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="id" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-</xs:schema>
-
+++ /dev/null
-<!-- Used by AAF (ATT inc 2013) -->
-<xs:schema
- xmlns:xs="http://www.w3.org/2001/XMLSchema"
- xmlns:aaf="urn:aaf:v2_0"
- targetNamespace="urn:aaf:v2_0"
- elementFormDefault="qualified">
-
-<!--
- Note: jan 22, 2015. Deprecating the "force" element in the "Request" Structure. Do that
- with Query Params.
-
- Eliminate in 3.0
- -->
-<!--
- Errors
- Note: This Error Structure has been made to conform to the AT&T TSS Policies
-
-
- -->
- <xs:element name="error">
- <xs:complexType>
- <xs:sequence>
- <!--
- Unique message identifier of the format ‘ABCnnnn’ where ‘ABC’ is
- either ‘SVC’ for Service Exceptions or ‘POL’ for Policy Exception.
- Exception numbers may be in the range of 0001 to 9999 where :
- * 0001 to 0199 are reserved for common exception messages
- * 0200 to 0999 are reserved for Parlay Web Services specification use
- * 1000-9999 are available for exceptions
- -->
- <xs:element name="messageId" type="xs:string" minOccurs="1" maxOccurs="1"/>
-
- <!--
- Message text, with replacement
- variables marked with %n, where n is
- an index into the list of <variables>
- elements, starting at 1
- -->
- <xs:element name="text" type="xs:string" minOccurs="1" maxOccurs="1"/>
-
- <!--
- List of zero or more strings that
- represent the contents of the variables
- used by the message text. -->
- <xs:element name="variables" type="xs:string" minOccurs="0" maxOccurs="unbounded" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Requests
- -->
- <xs:complexType name="Request">
- <xs:sequence>
- <xs:element name="start" type="xs:dateTime" minOccurs="1" maxOccurs="1" />
- <xs:element name="end" type="xs:date" minOccurs="1" maxOccurs="1"/>
- <!-- Deprecated. Use Query Command
- <xs:element name="force" type="xs:string" minOccurs="1" maxOccurs="1" default="false"/>
- -->
- </xs:sequence>
- </xs:complexType>
-
-<!--
- Permissions
--->
- <xs:complexType name = "pkey">
- <xs:sequence>
- <xs:element name="type" type="xs:string"/>
- <xs:element name="instance" type="xs:string"/>
- <xs:element name="action" type="xs:string"/>
- </xs:sequence>
- </xs:complexType>
-
- <xs:element name="permKey">
- <xs:complexType >
- <xs:complexContent>
- <xs:extension base="aaf:pkey" />
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="perm">
- <xs:complexType >
- <xs:complexContent>
- <xs:extension base="aaf:pkey">
- <xs:sequence>
- <xs:element name="roles" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="perms">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="aaf:perm" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="permRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="type" type="xs:string"/>
- <xs:element name="instance" type="xs:string"/>
- <xs:element name="action" type="xs:string"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
-
-<!--
- Roles
--->
- <xs:complexType name="rkey">
- <xs:sequence>
- <xs:element name="name" type="xs:string"/>
- </xs:sequence>
- </xs:complexType>
-
- <xs:element name="roleKey">
- <xs:complexType >
- <xs:complexContent>
- <xs:extension base="aaf:rkey" />
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="role">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:rkey">
- <xs:sequence>
- <xs:element name="perms" type="aaf:pkey" minOccurs="0" maxOccurs="unbounded"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="roles">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="aaf:role" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="roleRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="name" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="userRoleRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="user" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="role" type="xs:string" minOccurs="1" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="rolePermRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="perm" type="aaf:pkey" minOccurs="1" maxOccurs="1"/>
- <xs:element name="role" type="xs:string" minOccurs="1" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
-
- <xs:element name="nsRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="name" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="admin" type="xs:string" minOccurs="1" maxOccurs="unbounded"/>
- <xs:element name="responsible" type="xs:string" minOccurs="1" maxOccurs="unbounded"/>
- <xs:element name="scope" type="xs:int" minOccurs="0" maxOccurs="1"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name="description" type="xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name = "nss">
- <xs:complexType>
- <xs:sequence>
- <xs:element name = "ns" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name = "name" type = "xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name = "responsible" type = "xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name = "admin" type = "xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <!-- Note: feb 23, 2015. Added description field. Verify backward compatibility. JR -->
- <xs:element name = "description" type = "xs:string" minOccurs="0" maxOccurs="1"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Users
--->
- <xs:element name="users">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="user" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="id" type="xs:string" minOccurs="1" maxOccurs="1" />
- <xs:element name="expires" type="xs:date" minOccurs="1" maxOccurs="1" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-
-<!--
- Credentials
--->
- <xs:element name="credRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="id" type="xs:string"/>
- <xs:element name="type" type="xs:int" minOccurs="0" maxOccurs="1"/>
- <xs:choice >
- <xs:element name="password" type="xs:string" />
- <xs:element name="entry" type="xs:string" />
- </xs:choice>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
-<!--
- History
- -->
- <xs:element name="history">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="item" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="YYYYMM" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="timestamp" type="xs:dateTime" minOccurs="1" maxOccurs="1"/>
- <xs:element name="subject" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="target" type = "xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="action" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="memo" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="user" type="xs:string" minOccurs="1" maxOccurs="1"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Approvals
- -->
- <xs:complexType name="approval">
- <xs:sequence>
- <!-- Note, id is set by system -->
- <xs:element name="id" type="xs:string" minOccurs="0" maxOccurs="1"/>
- <xs:element name="ticket" type="xs:string"/>
- <xs:element name="user" type="xs:string"/>
- <xs:element name="approver" type="xs:string"/>
- <xs:element name="type" type="xs:string"/>
- <xs:element name="memo" type="xs:string"/>
- <xs:element name="updated" type="xs:dateTime"/>
- <xs:element name="status">
- <xs:simpleType>
- <xs:restriction base="xs:string">
- <xs:enumeration value="approve"/>
- <xs:enumeration value="reject"/>
- <xs:enumeration value="pending"/>
- </xs:restriction>
- </xs:simpleType>
- </xs:element>
- <xs:element name="operation">
- <xs:simpleType>
- <xs:restriction base="xs:string">
- <xs:enumeration value="C"/>
- <xs:enumeration value="U"/>
- <xs:enumeration value="D"/>
- <xs:enumeration value="G"/>
- <xs:enumeration value="UG"/>
- </xs:restriction>
- </xs:simpleType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- <xs:element name="approvals">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="approvals" type="aaf:approval" minOccurs="1" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- Delegates
--->
- <xs:complexType name="delg">
- <xs:sequence>
- <xs:element name="user" type="xs:string"/>
- <xs:element name="delegate" type="xs:string"/>
- <xs:element name="expires" type="xs:date"/>
- </xs:sequence>
- </xs:complexType>
-
- <xs:element name="delgRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="aaf:Request">
- <xs:sequence>
- <xs:element name="user" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="delegate" type="xs:string" minOccurs="1" maxOccurs="1"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="delgs">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="delgs" type="aaf:delg" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <!-- jg 3/11/2015 New for 2.0.8 -->
- <xs:element name="api">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="route" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="meth" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="path" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="param" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="desc" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="comments" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="contentType" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="expected" type="xs:int" minOccurs="1" maxOccurs="1"/>
- <xs:element name="explicitErr" type="xs:int" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-</xs:schema>
\ No newline at end of file
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-function http(meth, sURL, sInput, func) {
- if (sInput != "") {
- var http;
- if (window.XMLHttpRequest) {// code for IE7+, Firefox, Chrome, Opera, Safari
- http=new XMLHttpRequest();
- } else {// code for IE6, IE5
- http=new ActiveXObject('Microsoft.XMLHTTP');
- }
-
- http.onreadystatechange=function() {
- if(http.readyState==4 && http.status == 200) {
- func(http.responseText)
- }
- // Probably want Exception code too.
- }
-
- http.open(meth,sURL,false);
- http.setRequestHeader('Content-Type','text/plain;charset=UTF-8');
- http.send(sInput);
- }
-}
\ No newline at end of file
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-Object.defineProperty(Element.prototype, 'outerHeight', {
- 'get': function(){
- var height = this.clientHeight;
- height += getStyle(this,'marginTop');
- height += getStyle(this,'marginBottom');
- height += getStyle(this,'borderTopWidth');
- height += getStyle(this,'borderBottomWidth');
- return height;
- }
-});
-
-if (document.addEventListener) {
- document.addEventListener('DOMContentLoaded', function () {
- var height = document.querySelector("#footer").outerHeight;
- document.querySelector("#inner").setAttribute("style",
- "margin-bottom:" + height.toString()+ "px");
- });
-} else {
- window.attachEvent("onload", function () {
- var height = document.querySelector("#footer").outerHeight;
- document.querySelector("#inner").setAttribute("style",
- "margin-bottom:" + height.toString()+ "px");
- });
-}
-
-
-
-function getStyle(el, prop) {
- var result = el.currentStyle ? el.currentStyle[prop] :
- document.defaultView.getComputedStyle(el,"")[prop];
- if (parseInt(result,10))
- return parseInt(result,10);
- else
- return 0;
-}
-
-function divVisibility(divID) {
- var element = document.querySelector("#"+divID);
- if (element.style.display=="block")
- element.style.display="none";
- else
- element.style.display="block";
-}
-
-function datesURL(histPage) {
- var validated=true;
- var yearStart = document.querySelector('#yearStart').value;
- var yearEnd = document.querySelector('#yearEnd').value;
- var monthStart = document.querySelector('#monthStart').value;
- var monthEnd = document.querySelector('#monthEnd').value;
- if (monthStart.length == 1) monthStart = 0 + monthStart;
- if (monthEnd.length == 1) monthEnd = 0 + monthEnd;
-
- validated &= validateYear(yearStart);
- validated &= validateYear(yearEnd);
- validated &= validateMonth(monthStart);
- validated &= validateMonth(monthEnd);
-
- if (validated) window.location=histPage+"&dates="+yearStart+monthStart+"-"+yearEnd+monthEnd;
- else alert("Please correct your date selections");
-}
-
-function userFilter(approvalPage) {
- var user = document.querySelector('#userTextBox').value;
- if (user != "")
- window.location=approvalPage+"?user="+user;
- else
- window.location=approvalPage;
-}
-
-function validateYear(year) {
- var today = new Date();
- if (year >= 1900 && year <= today.getFullYear()) return true;
- else return false;
-}
-
-function validateMonth(month) {
- if (month) return true;
- else return false;
-}
-
-function alterLink(breadcrumbToFind, newTarget) {
- var breadcrumbs = document.querySelector("#breadcrumbs").getElementsByTagName("A");
- for (var i=0; i< breadcrumbs.length;i++) {
- var breadcrumbHref = breadcrumbs[i].getAttribute('href');
- if (breadcrumbHref.indexOf(breadcrumbToFind)>-1)
- breadcrumbs[i].setAttribute('href', newTarget);
- }
-}
-
-// clipBoardData object not cross-browser supported. Only IE it seems
-function copyToClipboard(controlId) {
- var control = document.getElementById(controlId);
- if (control == null) {
- alert("ERROR - control not found - " + controlId);
- } else {
- var controlValue = control.href;
- window.clipboardData.setData("text/plain", controlValue);
- alert("Copied text to clipboard : " + controlValue);
- }
-}
+++ /dev/null
-/*******************************************************************************
- * Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
- *******************************************************************************/
-function getCommand() {
- if(typeof String.prototype.trim !== 'function') {
- String.prototype.trim = function() {
- return this.replace(/^\s+|\s+$/g, '');
- };
- }
-
- var cmds = [];
- cmds = document.querySelector("#command_field").value.split(" ");
- var cleanCmd = "";
- if (document.querySelector("#details_img").getAttribute("class") == "selected")
- cleanCmd += "set details=true ";
- for (var i = 0; i < cmds.length;i++) {
- var trimmed = cmds[i].trim();
- if (trimmed != "")
- cleanCmd += trimmed + " ";
- }
-
- return cleanCmd.trim();
-}
-
-function moveCommandToDiv() {
-
- var textInput = document.querySelector("#command_field");
- var content = document.createTextNode(textInput.value);
- var parContent = document.createElement("p");
- var consoleDiv = document.querySelector("#console_area");
- var commandCount = consoleDiv.querySelectorAll(".command").length;
- parContent.setAttribute("class", "command");
- parContent.appendChild(content);
- consoleDiv.appendChild(parContent);
-
- textInput.value = "";
-}
-
-function printResponse(response) {
- var parContent = document.createElement("p");
- parContent.setAttribute("class", "response");
- var preTag = document.createElement("pre");
- parContent.appendChild(preTag);
- var content = document.createTextNode(response);
- preTag.appendChild(content);
- var consoleDiv = document.querySelector("#console_area");
- consoleDiv.appendChild(parContent);
-
- consoleDiv.scrollTop = consoleDiv.scrollHeight;
-}
-
-function clearHistory() {
- var consoleDiv = document.querySelector("#console_area");
- var curr;
- while (curr=consoleDiv.firstChild) {
- consoleDiv.removeChild(curr);
- }
- document.querySelector("#command_field").value = "";
- currentCmd = 0;
-}
-
-function buttonChangeFontSize(direction) {
- var slider = document.querySelector("#text_size_slider");
- var currentSize = parseInt(slider.value);
- var newSize;
- if (direction == "inc") {
- newSize = currentSize + 10;
- } else {
- newSize = currentSize - 10;
- }
- if (newSize > slider.max) newSize = parseInt(slider.max);
- if (newSize < slider.min) newSize = parseInt(slider.min);
- slider.value = newSize;
- changeFontSize(newSize);
-}
-
-function changeFontSize(size) {
- var consoleDiv = document.querySelector("#console_area");
- consoleDiv.style.fontSize = size + "%";
-}
-
-function handleDivHiding(id, img) {
- var options_link = document.querySelector("#options_link");
- var divHeight = toggleVisibility(document.querySelector("#"+id));
-
- if (id == 'options') {
- if (options_link.getAttribute("class") == "open") {
- changeImg(document.querySelector("#options_img"), "../../theme/options_down.png");
- options_link.setAttribute("class", "closed");
- } else {
- changeImg(document.querySelector("#options_img"), "../../theme/options_up.png");
- options_link.setAttribute("class", "open");
- }
- moveToggleImg(options_link, divHeight);
- } else { //id=text_slider
- selectOption(img,divHeight);
- }
-
-}
-
-function selectOption(img, divHeight) {
- var options_link = document.querySelector("#options_link");
- var anySelected;
- if (img.getAttribute("class") != "selected") {
- anySelected = document.querySelectorAll(".selected").length>0;
- if (anySelected == false)
- divHeight += 4;
- img.setAttribute("class", "selected");
- } else {
- img.setAttribute("class", "");
- anySelected = document.querySelectorAll(".selected").length>0;
- if (anySelected == false)
- divHeight -= 4;
-
- }
-
- moveToggleImg(options_link, divHeight);
-}
-
-function toggleVisibility(element) {
- var divHeight;
- if(element.style.display == 'block') {
- divHeight = 0 - element.clientHeight;
- element.style.display = 'none';
- } else {
- element.style.display = 'block';
- divHeight = element.clientHeight;
- }
- return divHeight;
-}
-
-function moveToggleImg(element, height) {
- var curTop = (element.style.top == "" ? 0 : parseInt(element.style.top));
- element.style.top = curTop + height;
-}
-
-function changeImg(img, loc) {
- img.src = loc;
-}
-
-var currentCmd = 0;
-function keyPressed() {
- document.querySelector("#command_field").onkeyup=function(e) {
- if (!e) e = window.event;
- var keyCode = e.which || e.keyCode;
- if (keyCode == 38 || keyCode == 40 || keyCode == 13 || keyCode == 27) {
- var cmdHistoryList = document.querySelectorAll(".command");
- switch (keyCode) {
- case 13:
- // press enter
-
- if (getCommand().toLowerCase()=="clear") {
- clearHistory();
- } else {
- currentCmd = cmdHistoryList.length + 1;
- document.querySelector("#submit").click();
- }
- break;
-
- case 27:
- //press escape
- currentCmd = cmdHistoryList.length;
- document.querySelector("#command_field").value = "";
- break;
-
- case 38:
- // press arrow up
- if (currentCmd != 0)
- currentCmd -= 1;
- if (cmdHistoryList.length != 0)
- document.querySelector("#command_field").value = cmdHistoryList[currentCmd].innerHTML;
- break;
- case 40:
- // press arrow down
- var cmdText = "";
- currentCmd = (currentCmd == cmdHistoryList.length) ? currentCmd : currentCmd + 1;
- if (currentCmd < cmdHistoryList.length)
- cmdText = cmdHistoryList[currentCmd].innerHTML;
-
- document.querySelector("#command_field").value = cmdText;
- break;
- }
- }
- }
-}
-
-function saveToFile() {
- var commands = document.querySelectorAll(".command");
- var responses = document.querySelectorAll(".response");
- var textToWrite = "";
- for (var i = 0; i < commands.length; i++) {
- textToWrite += "> " + commands[i].innerHTML + "\r\n";
- textToWrite += prettyResponse(responses[i].firstChild.innerHTML);
- }
-
- var ie = navigator.userAgent.match(/MSIE\s([\d.]+)/);
- var ie11 = navigator.userAgent.match(/Trident\/7.0/) && navigator.userAgent.match(/rv:11/);
- var ieVer=(ie ? ie[1] : (ie11 ? 11 : -1));
-
-// if (ie && ieVer<10) {
-// console.log("No blobs on IE ver<10");
-// return;
-// }
-
- var textFileAsBlob = new Blob([textToWrite], {type:'text/plain'});
- var fileName = "AAFcommands.log";
-
- if (ieVer >= 10) {
-// window.navigator.msSaveBlob(textFileAsBlob, fileName);
- window.navigator.msSaveOrOpenBlob(textFileAsBlob, fileName);
- } else {
- var downloadLink = document.createElement("a");
- downloadLink.download = fileName;
- downloadLink.innerHTML = "Download File";
- if (window.webkitURL != null) {
- // Chrome allows the link to be clicked
- // without actually adding it to the DOM.
- downloadLink.href = window.webkitURL.createObjectURL(textFileAsBlob);
- } else {
- // Firefox requires the link to be added to the DOM
- // before it can be clicked.
- downloadLink.href = window.URL.createObjectURL(textFileAsBlob);
- downloadLink.onclick = destroyClickedElement;
- downloadLink.style.display = "none";
- document.body.appendChild(downloadLink);
- }
-
- downloadLink.click();
- }
-}
-
-function prettyResponse(response) {
- var lines = response.split('\n');
- var cleanResponse = "";
- for (var i=0; i < lines.length; i++) {
- cleanResponse += lines[i] + "\r\n";
- }
- cleanResponse = cleanResponse.replace(/(<)/g,"<").replace(/(>)/g,">");
- return cleanResponse;
-}
-
-function destroyClickedElement(event){
- document.body.removeChild(event.target);
-}
-
-function fakePlaceholder() {
- document.querySelector("#command_field").setAttribute("value", "Type your AAFCLI commands here");
-}
-
-function maximizeConsole(img) {
- var footer = document.querySelector("#footer");
- var console_area = document.querySelector("#console_area");
- var content = document.querySelector("#content");
- var input_area = document.querySelector("#input_area");
- var help_msg = document.querySelector("#help_msg");
- var console_space = document.documentElement.clientHeight;
- console_space -= input_area.outerHeight;
- console_space -= help_msg.outerHeight;
- var height = getStyle(console_area,'paddingTop') + getStyle(console_area,'paddingBottom');
- console_space -= height;
-
-
- if (content.getAttribute("class") != "maximized") {
- content.setAttribute("class", "maximized");
- footer.style.display="none";
- console_area.style.resize="none";
- console_area.style.height=console_space.toString()+"px";
- } else {
- content.removeAttribute("class");
- footer.style.display="";
- console_area.style.resize="vertical";
- console_area.style.height="300px";
- }
- selectOption(img,0);
-}
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>\r
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aaf\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">\r
- <modelVersion>4.0.0</modelVersion>\r
- <parent>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>parent</artifactId>\r
- <version>1.0.1-SNAPSHOT</version>\r
- <relativePath>../pom.xml</relativePath>\r
- </parent>\r
- \r
- <artifactId>authz-gw</artifactId>\r
- <name>Authz Gate/Wall</name>\r
- <description>GW API</description>\r
- <url>https://github.com/att/AAF</url>\r
-\r
- <developers>\r
- <developer>\r
- <name>Jonathan Gathman</name>\r
- <email></email>\r
- <organization>ATT</organization>\r
- <organizationUrl></organizationUrl>\r
- </developer>\r
- </developers>\r
-\r
- <properties>\r
- <maven.test.failure.ignore>true</maven.test.failure.ignore>\r
- <project.swmVersion>30</project.swmVersion>\r
- <project.innoVersion>1.0.0-SNAPSHOT</project.innoVersion>\r
- <project.cadiVersion>1.0.0-SNAPSHOT</project.cadiVersion>\r
- <sonar.language>java</sonar.language>\r
- <sonar.skip>true</sonar.skip>\r
- <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>\r
- <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports</sonar.surefire.reportsPath>\r
- <sonar.jacoco.reportPath>${project.build.directory}/coverage-reports/jacoco.exec</sonar.jacoco.reportPath>\r
- <sonar.jacoco.itReportPath>${project.build.directory}/coverage-reports/jacoco-it.exec</sonar.jacoco.itReportPath>\r
- <sonar.jacoco.reportMissing.force.zero>true</sonar.jacoco.reportMissing.force.zero>\r
- <sonar.projectVersion>${project.version}</sonar.projectVersion>\r
- <nexusproxy>https://nexus.onap.org</nexusproxy>\r
- <snapshotNexusPath>/content/repositories/snapshots/</snapshotNexusPath>\r
- <releaseNexusPath>/content/repositories/releases/</releaseNexusPath>\r
- <stagingNexusPath>/content/repositories/staging/</stagingNexusPath>\r
- <sitePath>/content/sites/site/org/onap/aaf/authz/${project.artifactId}/${project.version}</sitePath>\r
- </properties>\r
- \r
- <dependencies>\r
- <dependency>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>authz-core</artifactId>\r
- <version>${project.version}</version>\r
- \r
- <exclusions>\r
- <exclusion> \r
- <groupId>javax.servlet</groupId>\r
- <artifactId>servlet-api</artifactId>\r
- </exclusion>\r
- </exclusions> \r
- </dependency>\r
- \r
- <dependency> \r
- <groupId>org.onap.aaf.cadi</groupId>\r
- <artifactId>cadi-aaf</artifactId>\r
- <version>${project.cadiVersion}</version>\r
- </dependency>\r
-\r
-\r
- \r
- </dependencies>\r
- \r
- <build>\r
- <plugins>\r
- <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->\r
- <plugin>\r
- <groupId>org.codehaus.mojo</groupId>\r
- <artifactId>jaxb2-maven-plugin</artifactId>\r
- </plugin>\r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-jar-plugin</artifactId>\r
- <configuration>\r
- <includes>\r
- <include>**/*.class</include>\r
- </includes>\r
- </configuration>\r
- <version>2.3.1</version>\r
- </plugin>\r
-\r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-javadoc-plugin</artifactId>\r
- <version>2.10.4</version>\r
- <configuration>\r
- <failOnError>false</failOnError>\r
- </configuration>\r
- <executions>\r
- <execution>\r
- <id>attach-javadocs</id>\r
- <goals>\r
- <goal>jar</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin> \r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-source-plugin</artifactId>\r
- <version>2.2.1</version>\r
- <executions>\r
- <execution>\r
- <id>attach-sources</id>\r
- <goals>\r
- <goal>jar-no-fork</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin>\r
-\r
-<plugin>\r
- <groupId>org.sonatype.plugins</groupId>\r
- <artifactId>nexus-staging-maven-plugin</artifactId>\r
- <version>1.6.7</version>\r
- <extensions>true</extensions>\r
- <configuration>\r
- <nexusUrl>${nexusproxy}</nexusUrl>\r
- <stagingProfileId>176c31dfe190a</stagingProfileId>\r
- <serverId>ecomp-staging</serverId>\r
- </configuration>\r
- </plugin> \r
- <plugin>\r
- <groupId>org.jacoco</groupId>\r
- <artifactId>jacoco-maven-plugin</artifactId>\r
- <version>0.7.7.201606060606</version>\r
- <configuration>\r
- <dumpOnExit>true</dumpOnExit>\r
- <includes>\r
- <include>org.onap.aaf.*</include>\r
- </includes>\r
- </configuration>\r
- <executions>\r
- <execution>\r
- <id>pre-unit-test</id>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/coverage-reports/jacoco.exec</destFile>\r
- <!-- <append>true</append> -->\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <id>pre-integration-test</id>\r
- <phase>pre-integration-test</phase>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/coverage-reports/jacoco-it.exec</destFile>\r
- <!-- <append>true</append> -->\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <goals>\r
- <goal>merge</goal>\r
- </goals>\r
- <phase>post-integration-test</phase>\r
- <configuration>\r
- <fileSets>\r
- <fileSet implementation="org.apache.maven.shared.model.fileset.FileSet">\r
- <directory>${project.build.directory}/coverage-reports</directory>\r
- <includes>\r
- <include>*.exec</include>\r
- </includes>\r
- </fileSet>\r
- </fileSets>\r
- <destFile>${project.build.directory}/jacoco-dev.exec</destFile>\r
- </configuration>\r
- </execution>\r
- </executions>\r
- </plugin> \r
-\r
- \r
- </plugins>\r
- </build>\r
-<distributionManagement>\r
- <repository>\r
- <id>ecomp-releases</id>\r
- <name>AAF Release Repository</name>\r
- <url>${nexusproxy}${releaseNexusPath}</url>\r
- </repository>\r
- <snapshotRepository>\r
- <id>ecomp-snapshots</id>\r
- <name>AAF Snapshot Repository</name>\r
- <url>${nexusproxy}${snapshotNexusPath}</url>\r
- </snapshotRepository>\r
- <site>\r
- <id>ecomp-site</id>\r
- <url>dav:${nexusproxy}${sitePath}</url>\r
- </site>\r
- </distributionManagement>\r
-\r
-</project>\r
+++ /dev/null
-##
-## AUTHZ GateWall (authz-gw) Properties
-##
-
-hostname=_HOSTNAME_
-
-## DISCOVERY (DME2) Parameters on the Command Line
-AFT_LATITUDE=_AFT_LATITUDE_
-AFT_LONGITUDE=_AFT_LONGITUDE_
-AFT_ENVIRONMENT=_AFT_ENVIRONMENT_
-AFT_ENV_CONTEXT=_ENV_CONTEXT_
-
-DEPLOYED_VERSION=_ARTIFACT_VERSION_
-
-## Pull in common/security properties
-
-cadi_prop_files=_COMMON_DIR_/com.att.aaf.common.props;_COMMON_DIR_/com.att.aaf.props
-
-
-##DME2 related parameters
-DMEServiceName=service=com.att.authz.authz-gw/version=_MAJOR_VER_._MINOR_VER_._PATCH_VER_/envContext=_ENV_CONTEXT_/routeOffer=_ROUTE_OFFER_
-AFT_DME2_PORT_RANGE=_AUTHZ_GW_PORT_RANGE_
-
-# Turn on both AAF TAF & LUR 2.0
-aaf_url=https://DME2RESOLVE/service=com.att.authz.AuthorizationService/version=_MAJOR_VER_._MINOR_VER_/envContext=_ENV_CONTEXT_/routeOffer=_ROUTE_OFFER_
-
-# CSP
-csp_domain=PROD
-
-# GUI Login Page
-cadi_loginpage_url=https://DME2RESOLVE/service=com.att.authz.authz-gui/version=_MAJOR_VER_._MINOR_VER_/envContext=_ENV_CONTEXT_/routeOffer=_ROUTE_OFFER_/login
-
-
+++ /dev/null
-#-------------------------------------------------------------------------------\r
-# ============LICENSE_START====================================================\r
-# * org.onap.aaf\r
-# * ===========================================================================\r
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
-# * ===========================================================================\r
-# * Licensed under the Apache License, Version 2.0 (the "License");\r
-# * you may not use this file except in compliance with the License.\r
-# * You may obtain a copy of the License at\r
-# * \r
-# * http://www.apache.org/licenses/LICENSE-2.0\r
-# * \r
-# * Unless required by applicable law or agreed to in writing, software\r
-# * distributed under the License is distributed on an "AS IS" BASIS,\r
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
-# * See the License for the specific language governing permissions and\r
-# * limitations under the License.\r
-# * ============LICENSE_END====================================================\r
-# *\r
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
-# *\r
-#-------------------------------------------------------------------------------\r
-###############################################################################\r
-# Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.\r
-###############################################################################\r
-#\r
-# Licensed to the Apache Software Foundation (ASF) under one\r
-# or more contributor license agreements. See the NOTICE file\r
-# distributed with this work for additional information\r
-# regarding copyright ownership. The ASF licenses this file\r
-# to you under the Apache License, Version 2.0 (the\r
-# "License"); you may not use this file except in compliance\r
-# with the License. You may obtain a copy of the License at\r
-#\r
-# http://www.apache.org/licenses/LICENSE-2.0\r
-#\r
-# Unless required by applicable law or agreed to in writing,\r
-# software distributed under the License is distributed on an\r
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r
-# KIND, either express or implied. See the License for the\r
-# specific language governing permissions and limitations\r
-# under the License.\r
-#\r
-log4j.appender.INIT=org.apache.log4j.DailyRollingFileAppender \r
-log4j.appender.INIT.File=_LOG_DIR_/${LOG4J_FILENAME_init}\r
-log4j.appender.INIT.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.INIT.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.INIT.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.INIT.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.INIT.layout.ConversionPattern=%d %p [%c] %m %n\r
-\r
-log4j.appender.GW=org.apache.log4j.DailyRollingFileAppender \r
-log4j.appender.GW.File=_LOG_DIR_/${LOG4J_FILENAME_gw}\r
-log4j.appender.GW.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.GW.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.GW.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.GW.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.GW.layout.ConversionPattern=%d %p [%c] %m %n\r
-\r
-log4j.appender.AUDIT=org.apache.log4j.DailyRollingFileAppender\r
-log4j.appender.AUDIT.File=_LOG_DIR_/${LOG4J_FILENAME_audit}\r
-log4j.appender.AUDIT.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.GW.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.GW.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.AUDIT.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.AUDIT.layout.ConversionPattern=%d %p [%c] %m %n\r
-\r
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender\r
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] %m %n\r
-\r
-# General Apache libraries\r
-log4j.rootLogger=WARN\r
-log4j.logger.org.apache=WARN,INIT\r
-log4j.logger.dme2=WARN,INIT\r
-log4j.logger.init=INFO,INIT\r
-log4j.logger.gw=_LOG4J_LEVEL_,GW\r
-log4j.logger.audit=INFO,AUDIT\r
-\r
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\r
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aaf\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<ns2:ManagedResourceList xmlns:ns2="http://scld.att.com/lrm/util" xmlns="http://scld.att.com/lrm/commontypes" xmlns:ns3="http://scld.att.com/lrm/types">\r
- <ns2:ManagedResource>\r
- <ResourceDescriptor>\r
- <ResourceName>com.att.authz._ARTIFACT_ID_</ResourceName>\r
- <ResourceVersion>\r
- <Major>_MAJOR_VER_</Major>\r
- <Minor>_MINOR_VER_</Minor>\r
- <Patch>_PATCH_VER_</Patch> \r
- </ResourceVersion>\r
- <RouteOffer>_ROUTE_OFFER_</RouteOffer>\r
- </ResourceDescriptor>\r
- <ResourceType>Java</ResourceType>\r
- <ResourcePath>com.att.authz.gw.GwAPI</ResourcePath>\r
- <ResourceProps>\r
- <Tag>process.workdir</Tag>\r
- <Value>_ROOT_DIR_</Value>\r
- </ResourceProps> \r
- <ResourceProps>\r
- <Tag>jvm.version</Tag>\r
- <Value>1.8</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.args</Tag>\r
- <Value>-DAFT_LATITUDE=_AFT_LATITUDE_ -DAFT_LONGITUDE=_AFT_LONGITUDE_ -DAFT_ENVIRONMENT=_AFT_ENVIRONMENT_ -Dplatform=_SCLD_PLATFORM_ -Dcom.sun.jndi.ldap.connect.pool.maxsize=20 -Dcom.sun.jndi.ldap.connect.pool.prefsize=10 -Dcom.sun.jndi.ldap.connect.pool.timeout=3000 </Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.classpath</Tag>\r
- <Value>_ROOT_DIR_/etc:_ROOT_DIR_/lib/*:</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.heap.min</Tag>\r
- <Value>512m</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.heap.max</Tag>\r
- <Value>2048m</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>start.class</Tag>\r
- <Value>com.att.authz.gw.GwAPI</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>stdout.redirect</Tag>\r
- <Value>_ROOT_DIR_/logs/SystemOut.log</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>stderr.redirect</Tag>\r
- <Value>_ROOT_DIR_/logs/SystemErr.log</Value>\r
- </ResourceProps>\r
- <ResourceOSID>aft</ResourceOSID>\r
- <ResourceStartType>AUTO</ResourceStartType>\r
- <ResourceStartPriority>4</ResourceStartPriority>\r
- <ResourceMinCount>_RESOURCE_MIN_COUNT_</ResourceMinCount>\r
- <ResourceMaxCount>_RESOURCE_MAX_COUNT_</ResourceMaxCount> \r
- <ResourceRegistration>_RESOURCE_REGISTRATION_</ResourceRegistration>\r
- <ResourceSWMComponent>com.att.authz:_ARTIFACT_ID_</ResourceSWMComponent>\r
- <ResourceSWMComponentVersion>_ARTIFACT_VERSION_</ResourceSWMComponentVersion>\r
- </ns2:ManagedResource>\r
-</ns2:ManagedResourceList>\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw;\r
-\r
-import java.net.HttpURLConnection;\r
-import java.util.ArrayList;\r
-import java.util.EnumSet;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Properties;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.gw.api.API_AAFAccess;\r
-import org.onap.aaf.authz.gw.api.API_Api;\r
-import org.onap.aaf.authz.gw.api.API_Find;\r
-import org.onap.aaf.authz.gw.api.API_Proxy;\r
-import org.onap.aaf.authz.gw.api.API_TGuard;\r
-import org.onap.aaf.authz.gw.facade.GwFacade_1_0;\r
-import org.onap.aaf.authz.gw.mapper.Mapper.API;\r
-import org.onap.aaf.authz.server.AbsServer;\r
-import org.onap.aaf.cache.Cache;\r
-import org.onap.aaf.cache.Cache.Dated;\r
-import org.onap.aaf.cssa.rserv.HttpMethods;\r
-\r
-import com.att.aft.dme2.api.DME2Exception;\r
-\r
-import com.att.aft.dme2.api.DME2Manager;\r
-import com.att.aft.dme2.api.DME2Server;\r
-import com.att.aft.dme2.api.DME2ServerProperties;\r
-import com.att.aft.dme2.api.DME2ServiceHolder;\r
-import com.att.aft.dme2.api.util.DME2FilterHolder;\r
-import com.att.aft.dme2.api.util.DME2FilterHolder.RequestDispatcherType;\r
-import com.att.aft.dme2.api.util.DME2ServletHolder;\r
-import org.onap.aaf.cadi.CadiException;\r
-//import org.onap.aaf.cadi.PropAccess;\r
-import org.onap.aaf.cadi.aaf.v2_0.AAFAuthn;\r
-import org.onap.aaf.cadi.aaf.v2_0.AAFLurPerm;\r
-import org.onap.aaf.cadi.config.Config;\r
-import org.onap.aaf.inno.env.APIException;\r
-\r
-public class GwAPI extends AbsServer {\r
- private static final String USER_PERMS = "userPerms";\r
- private GwFacade_1_0 facade; // this is the default Facade\r
- private GwFacade_1_0 facade_1_0_XML;\r
- public Map<String, Dated> cacheUser;\r
- public final String aafurl;\r
- public final AAFAuthn<HttpURLConnection> aafAuthn;\r
- public final AAFLurPerm aafLurPerm;\r
- public DME2Manager dme2Man;\r
-\r
- \r
- /**\r
- * Construct AuthzAPI with all the Context Supporting Routes that Authz needs\r
- * \r
- * @param env\r
- * @param si \r
- * @param dm \r
- * @param decryptor \r
- * @throws APIException \r
- */\r
- public GwAPI(AuthzEnv env) throws Exception {\r
- super(env,"AAF GW");\r
- aafurl = env.getProperty(Config.AAF_URL); \r
-\r
- // Setup Logging\r
- //env.setLog4JNames("log4j.properties","authz","gw","audit","init","trace");\r
-\r
- aafLurPerm = aafCon.newLur();\r
- // Note: If you need both Authn and Authz construct the following:\r
- aafAuthn = aafCon.newAuthn(aafLurPerm);\r
-\r
- // Initialize Facade for all uses\r
- //AuthzTrans trans = env.newTrans();\r
-\r
- // facade = GwFacadeFactory.v1_0(env,trans,Data.TYPE.JSON); // Default Facade\r
- // facade_1_0_XML = GwFacadeFactory.v1_0(env,trans,Data.TYPE.XML);\r
-\r
- synchronized(env) {\r
- if(cacheUser == null) {\r
- cacheUser = Cache.obtain(USER_PERMS);\r
- //Cache.startCleansing(env, USER_PERMS);\r
- Cache.addShutdownHook(); // Setup Shutdown Hook to close cache\r
- }\r
- }\r
- \r
- ////////////////////////////////////////////////////////////////////////////\r
- // Time Critical\r
- // These will always be evaluated first\r
- ////////////////////////////////////////////////////////////////////////\r
- API_AAFAccess.init(this,facade);\r
- API_Find.init(this, facade);\r
- API_TGuard.init(this, facade);\r
- API_Proxy.init(this, facade);\r
- \r
- ////////////////////////////////////////////////////////////////////////\r
- // Management APIs\r
- ////////////////////////////////////////////////////////////////////////\r
- // There are several APIs around each concept, and it gets a bit too\r
- // long in this class to create. The initialization of these Management\r
- // APIs have therefore been pushed to StandAlone Classes with static\r
- // init functions\r
- API_Api.init(this, facade);\r
-\r
- ////////////////////////////////////////////////////////////////////////\r
- // Default Function\r
- ////////////////////////////////////////////////////////////////////////\r
- API_AAFAccess.initDefault(this,facade);\r
-\r
- }\r
- \r
- /**\r
- * Setup XML and JSON implementations for each supported Version type\r
- * \r
- * We do this by taking the Code passed in and creating clones of these with the appropriate Facades and properties\r
- * to do Versions and Content switches\r
- * \r
- */\r
- public void route(HttpMethods meth, String path, API api, GwCode code) throws Exception {\r
- String version = "1.0";\r
- // Get Correct API Class from Mapper\r
- Class<?> respCls = facade.mapper().getClass(api); \r
- if(respCls==null) throw new Exception("Unknown class associated with " + api.getClass().getName() + ' ' + api.name());\r
- // setup Application API HTML ContentTypes for JSON and Route\r
- String application = applicationJSON(respCls, version);\r
- //route(env,meth,path,code,application,"application/json;version="+version,"*/*");\r
-\r
- // setup Application API HTML ContentTypes for XML and Route\r
- application = applicationXML(respCls, version);\r
- //route(env,meth,path,code.clone(facade_1_0_XML,false),application,"text/xml;version="+version);\r
- \r
- // Add other Supported APIs here as created\r
- }\r
- \r
- public void routeAll(HttpMethods meth, String path, API api, GwCode code) throws Exception {\r
- //route(env,meth,path,code,""); // this will always match\r
- }\r
-\r
-\r
- /**\r
- * Start up AuthzAPI as DME2 Service\r
- * @param env\r
- * @param props\r
- * @throws DME2Exception\r
- * @throws CadiException \r
- */\r
- public void startDME2(Properties props) throws DME2Exception, CadiException {\r
- \r
- dme2Man = new DME2Manager("GatewayDME2Manager",props);\r
-\r
- DME2ServiceHolder svcHolder;\r
- List<DME2ServletHolder> slist = new ArrayList<DME2ServletHolder>();\r
- svcHolder = new DME2ServiceHolder();\r
- String serviceName = env.getProperty("DMEServiceName",null);\r
- if(serviceName!=null) {\r
- svcHolder.setServiceURI(serviceName);\r
- svcHolder.setManager(dme2Man);\r
- svcHolder.setContext("/");\r
- \r
- \r
- \r
- DME2ServletHolder srvHolder = new DME2ServletHolder(this, new String[] {"/dme2","/api"});\r
- srvHolder.setContextPath("/*");\r
- slist.add(srvHolder);\r
- \r
- EnumSet<RequestDispatcherType> edlist = EnumSet.of(\r
- RequestDispatcherType.REQUEST,\r
- RequestDispatcherType.FORWARD,\r
- RequestDispatcherType.ASYNC\r
- );\r
-\r
- ///////////////////////\r
- // Apply Filters\r
- ///////////////////////\r
- List<DME2FilterHolder> flist = new ArrayList<DME2FilterHolder>();\r
- \r
- // Leave Login page un secured\r
- // AuthzTransOnlyFilter atof = new AuthzTransOnlyFilter(env);\r
- // flist.add(new DME2FilterHolder(atof,"/login", edlist));\r
-\r
- // Secure all other interactions with AuthzTransFilter\r
-// flist.add(new DME2FilterHolder(\r
-// new AuthzTransFilter(env, aafCon, new AAFTrustChecker(\r
-// env.getProperty(Config.CADI_TRUST_PROP, Config.CADI_USER_CHAIN),\r
-// Define.ROOT_NS + ".mechid|"+Define.ROOT_COMPANY+"|trust"\r
-// )),\r
-// "/*", edlist));\r
-// \r
-\r
- svcHolder.setFilters(flist);\r
- svcHolder.setServletHolders(slist);\r
- \r
- DME2Server dme2svr = dme2Man.getServer();\r
-// dme2svr.setGracefulShutdownTimeMs(1000);\r
- \r
- // env.init().log("Starting GW Jetty/DME2 server...");\r
- dme2svr.start();\r
- DME2ServerProperties dsprops = dme2svr.getServerProperties();\r
- try {\r
-// if(env.getProperty("NO_REGISTER",null)!=null)\r
- dme2Man.bindService(svcHolder);\r
-// env.init().log("DME2 is available as HTTP"+(dsprops.isSslEnable()?"/S":""),"on port:",dsprops.getPort());\r
-\r
- while(true) { // Per DME2 Examples...\r
- Thread.sleep(5000);\r
- }\r
- } catch(InterruptedException e) {\r
- // env.init().log("AAF Jetty Server interrupted!");\r
- } catch(Exception e) { // Error binding service doesn't seem to stop DME2 or Process\r
- // env.init().log(e,"DME2 Initialization Error");\r
- dme2svr.stop();\r
- System.exit(1);\r
- }\r
- } else {\r
- //env.init().log("Properties must contain DMEServiceName");\r
- }\r
- }\r
-\r
- public static void main(String[] args) {\r
- setup(GwAPI.class,"authGW.props");\r
- }\r
-\r
-// public void route(PropAccess env, HttpMethods get, String string, GwCode gwCode, String string2, String string3,\r
-// String string4) {\r
-// // TODO Auto-generated method stub\r
-// \r
-// }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.gw.facade.GwFacade;\r
-import org.onap.aaf.cssa.rserv.HttpCode;\r
-\r
-public abstract class GwCode extends HttpCode<AuthzTrans, GwFacade> implements Cloneable {\r
- public boolean useJSON;\r
-\r
- public GwCode(GwFacade facade, String description, boolean useJSON, String ... roles) {\r
- super(facade, description, roles);\r
- this.useJSON = useJSON;\r
- }\r
- \r
- public <D extends GwCode> D clone(GwFacade facade, boolean useJSON) throws Exception {\r
- @SuppressWarnings("unchecked")\r
- D d = (D)clone();\r
- d.useJSON = useJSON;\r
- d.context = facade;\r
- return d;\r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.api;\r
-\r
-import java.io.IOException;\r
-import java.net.ConnectException;\r
-import java.net.MalformedURLException;\r
-import java.net.URI;\r
-import java.security.Principal;\r
-\r
-import javax.servlet.ServletOutputStream;\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.gw.GwAPI;\r
-import org.onap.aaf.authz.gw.GwCode;\r
-import org.onap.aaf.authz.gw.facade.GwFacade;\r
-import org.onap.aaf.authz.gw.mapper.Mapper.API;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cache.Cache.Dated;\r
-import org.onap.aaf.cssa.rserv.HttpMethods;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-import org.onap.aaf.cadi.CadiException;\r
-import org.onap.aaf.cadi.Locator;\r
-import org.onap.aaf.cadi.Locator.Item;\r
-import org.onap.aaf.cadi.LocatorException;\r
-import org.onap.aaf.cadi.aaf.AAFPermission;\r
-import org.onap.aaf.cadi.client.Future;\r
-import org.onap.aaf.cadi.client.Rcli;\r
-import org.onap.aaf.cadi.client.Retryable;\r
-import org.onap.aaf.cadi.dme2.DME2Locator;\r
-import org.onap.aaf.cadi.principal.BasicPrincipal;\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-\r
-public class API_AAFAccess {\r
- private static final String AUTHZ_DME2_GUI = "com.att.authz.authz-gui";\r
- static final String AFT_ENVIRONMENT="AFT_ENVIRONMENT";\r
- static final String AFT_ENV_CONTEXT="AFT_ENV_CONTEXT";\r
- static final String AFTUAT="AFTUAT";\r
- \r
- private static final String PROD = "PROD";\r
- private static final String IST = "IST"; // main NONPROD system\r
- private static final String PERF = "PERF";\r
- private static final String TEST = "TEST";\r
- private static final String DEV = "DEV";\r
- \r
-// private static String service, version, envContext; \r
- private static String routeOffer;\r
-\r
- private static final String GET_PERMS_BY_USER = "Get Perms by User";\r
- private static final String USER_HAS_PERM ="User Has Perm";\r
-// private static final String USER_IN_ROLE ="User Has Role";\r
- private static final String BASIC_AUTH ="AAF Basic Auth";\r
- \r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param gwAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(final GwAPI gwAPI, GwFacade facade) throws Exception {\r
- String aftenv = gwAPI.env.getProperty(AFT_ENVIRONMENT);\r
- if(aftenv==null) throw new Exception(AFT_ENVIRONMENT + " must be set");\r
- \r
- int equals, count=0;\r
- for(int slash = gwAPI.aafurl.indexOf('/');slash>0;++count) {\r
- equals = gwAPI.aafurl.indexOf('=',slash)+1;\r
- slash = gwAPI.aafurl.indexOf('/',slash+1);\r
- switch(count) {\r
- case 2:\r
-// service = gwAPI.aafurl.substring(equals, slash);\r
- break;\r
- case 3:\r
-// version = gwAPI.aafurl.substring(equals, slash);\r
- break;\r
- case 4:\r
-// envContext = gwAPI.aafurl.substring(equals, slash);\r
- break;\r
- case 5:\r
- routeOffer = gwAPI.aafurl.substring(equals);\r
- break;\r
- }\r
- }\r
- if(count<6) throw new MalformedURLException(gwAPI.aafurl);\r
- \r
- gwAPI.route(HttpMethods.GET,"/authz/perms/user/:user",API.VOID,new GwCode(facade,GET_PERMS_BY_USER, true) {\r
- @Override\r
- public void handle(final AuthzTrans trans, final HttpServletRequest req, final HttpServletResponse resp) throws Exception {\r
- TimeTaken tt = trans.start(GET_PERMS_BY_USER, Env.SUB);\r
- try {\r
- final String accept = req.getHeader("ACCEPT");\r
- final String user = pathParam(req,":user");\r
- if(!user.contains("@")) {\r
- context.error(trans,resp,Result.ERR_BadData,"User [%s] must be fully qualified with domain",user);\r
- return;\r
- }\r
- String key = trans.user() + user + (accept!=null&&accept.contains("xml")?"-xml":"-json");\r
- TimeTaken tt2 = trans.start("Cache Lookup",Env.SUB);\r
- Dated d;\r
- try {\r
- d = gwAPI.cacheUser.get(key);\r
- } finally {\r
- tt2.done();\r
- }\r
- \r
- if(d==null || d.data.isEmpty()) {\r
- tt2 = trans.start("AAF Service Call",Env.REMOTE);\r
- try {\r
- gwAPI.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {\r
- @Override\r
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {\r
- Future<String> fp = client.read("/authz/perms/user/"+user,accept);\r
- if(fp.get(5000)) {\r
- gwAPI.cacheUser.put(key, new Dated(new User(fp.code(),fp.body())));\r
- resp.setStatus(HttpStatus.OK_200);\r
- ServletOutputStream sos;\r
- try {\r
- sos = resp.getOutputStream();\r
- sos.print(fp.value);\r
- } catch (IOException e) {\r
- throw new CadiException(e);\r
- }\r
- } else {\r
- gwAPI.cacheUser.put(key, new Dated(new User(fp.code(),fp.body())));\r
- context.error(trans,resp,fp.code(),fp.body());\r
- }\r
- return null;\r
- }\r
- });\r
- } finally {\r
- tt2.done();\r
- }\r
- } else {\r
- User u = (User)d.data.get(0);\r
- resp.setStatus(u.code);\r
- ServletOutputStream sos = resp.getOutputStream();\r
- sos.print(u.resp);\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- });\r
-\r
- gwAPI.route(gwAPI.env,HttpMethods.GET,"/authn/basicAuth",new GwCode(facade,BASIC_AUTH, true) {\r
- @Override\r
- public void handle(final AuthzTrans trans, final HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Principal p = trans.getUserPrincipal();\r
- if(p == null) {\r
- trans.error().log("Transaction not Authenticated... no Principal");\r
- resp.setStatus(HttpStatus.FORBIDDEN_403);\r
- } else if (p instanceof BasicPrincipal) {\r
- // the idea is that if call is made with this credential, and it's a BasicPrincipal, it's ok\r
- // otherwise, it wouldn't have gotten here.\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- trans.checkpoint("Basic Auth Check Failed: This wasn't a Basic Auth Trans");\r
- // For Auth Security questions, we don't give any info to client on why failed\r
- resp.setStatus(HttpStatus.FORBIDDEN_403);\r
- }\r
- }\r
- },"text/plain","*/*","*");\r
-\r
- /**\r
- * Query User Has Perm\r
- */\r
- gwAPI.route(HttpMethods.GET,"/ask/:user/has/:type/:instance/:action",API.VOID,new GwCode(facade,USER_HAS_PERM, true) {\r
- @Override\r
- public void handle(final AuthzTrans trans, final HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- try {\r
- resp.getOutputStream().print(\r
- gwAPI.aafLurPerm.fish(pathParam(req,":user"), new AAFPermission(\r
- pathParam(req,":type"),\r
- pathParam(req,":instance"),\r
- pathParam(req,":action"))));\r
- resp.setStatus(HttpStatus.OK_200);\r
- } catch(Exception e) {\r
- context.error(trans, resp, Result.ERR_General, e.getMessage());\r
- }\r
- }\r
- });\r
-\r
- if(AFTUAT.equals(aftenv)) {\r
- gwAPI.route(HttpMethods.GET,"/ist/aaf/:version/:path*",API.VOID ,new GwCode(facade,"Access UAT GUI for AAF", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- try{\r
- redirect(trans, req, resp, context, \r
- new DME2Locator(gwAPI.env, gwAPI.dme2Man, AUTHZ_DME2_GUI, pathParam(req,":version"), IST, routeOffer), \r
- pathParam(req,":path"));\r
- } catch (LocatorException e) {\r
- context.error(trans, resp, Result.ERR_BadData, e.getMessage());\r
- } catch (Exception e) {\r
- context.error(trans, resp, Result.ERR_General, e.getMessage());\r
- }\r
- }\r
- });\r
-\r
- gwAPI.route(HttpMethods.GET,"/test/aaf/:version/:path*",API.VOID ,new GwCode(facade,"Access TEST GUI for AAF", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- try{\r
- redirect(trans, req, resp, context, \r
- new DME2Locator(gwAPI.env, gwAPI.dme2Man, AUTHZ_DME2_GUI, pathParam(req,":version"), TEST, routeOffer), \r
- pathParam(req,":path"));\r
- } catch (LocatorException e) {\r
- context.error(trans, resp, Result.ERR_BadData, e.getMessage());\r
- } catch (Exception e) {\r
- context.error(trans, resp, Result.ERR_General, e.getMessage());\r
- }\r
- }\r
- });\r
-\r
- gwAPI.route(HttpMethods.GET,"/perf/aaf/:version/:path*",API.VOID ,new GwCode(facade,"Access PERF GUI for AAF", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- try{\r
- redirect(trans, req, resp, context, \r
- new DME2Locator(gwAPI.env, gwAPI.dme2Man, AUTHZ_DME2_GUI, pathParam(req,":version"), PERF, routeOffer), \r
- pathParam(req,":path"));\r
- } catch (LocatorException e) {\r
- context.error(trans, resp, Result.ERR_BadData, e.getMessage());\r
- } catch (Exception e) {\r
- context.error(trans, resp, Result.ERR_General, e.getMessage());\r
- }\r
- }\r
- });\r
-\r
- gwAPI.route(HttpMethods.GET,"/dev/aaf/:version/:path*",API.VOID,new GwCode(facade,"Access DEV GUI for AAF", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- try {\r
- redirect(trans, req, resp, context, \r
- new DME2Locator(gwAPI.env, gwAPI.dme2Man, AUTHZ_DME2_GUI, pathParam(req,":version"), DEV, routeOffer), \r
- pathParam(req,":path"));\r
- } catch (LocatorException e) {\r
- context.error(trans, resp, Result.ERR_BadData, e.getMessage());\r
- } catch (Exception e) {\r
- context.error(trans, resp, Result.ERR_General, e.getMessage());\r
- }\r
- }\r
- });\r
- } else {\r
- gwAPI.route(HttpMethods.GET,"/aaf/:version/:path*",API.VOID,new GwCode(facade,"Access PROD GUI for AAF", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- try {\r
- redirect(trans, req, resp, context, \r
- new DME2Locator(gwAPI.env, gwAPI.dme2Man, AUTHZ_DME2_GUI, pathParam(req,":version"), PROD, routeOffer), \r
- pathParam(req,":path"));\r
- } catch (LocatorException e) {\r
- context.error(trans, resp, Result.ERR_BadData, e.getMessage());\r
- } catch (Exception e) {\r
- context.error(trans, resp, Result.ERR_General, e.getMessage());\r
- }\r
- }\r
- });\r
- }\r
- \r
- }\r
- \r
- public static void initDefault(final GwAPI gwAPI, GwFacade facade) throws Exception {\r
- String aftenv = gwAPI.env.getProperty(AFT_ENVIRONMENT);\r
- if(aftenv==null) throw new Exception(AFT_ENVIRONMENT + " must be set");\r
- \r
- String aftctx = gwAPI.env.getProperty(AFT_ENV_CONTEXT);\r
- if(aftctx==null) throw new Exception(AFT_ENV_CONTEXT + " must be set");\r
-\r
- /**\r
- * "login" url\r
- */\r
- gwAPI.route(HttpMethods.GET,"/login",API.VOID,new GwCode(facade,"Access " + aftctx + " GUI for AAF", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- try {\r
- redirect(trans, req, resp, context, \r
- new DME2Locator(gwAPI.env, gwAPI.dme2Man, AUTHZ_DME2_GUI, "2.0", aftctx, routeOffer), \r
- "login");\r
- } catch (LocatorException e) {\r
- context.error(trans, resp, Result.ERR_BadData, e.getMessage());\r
- } catch (Exception e) {\r
- context.error(trans, resp, Result.ERR_General, e.getMessage());\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Default URL\r
- */\r
- gwAPI.route(HttpMethods.GET,"/",API.VOID,new GwCode(facade,"Access " + aftctx + " GUI for AAF", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- try {\r
- redirect(trans, req, resp, context, \r
- new DME2Locator(gwAPI.env, gwAPI.dme2Man, AUTHZ_DME2_GUI, "2.0", aftctx, routeOffer), \r
- "gui/home");\r
- } catch (LocatorException e) {\r
- context.error(trans, resp, Result.ERR_BadData, e.getMessage());\r
- } catch (Exception e) {\r
- context.error(trans, resp, Result.ERR_General, e.getMessage());\r
- }\r
- }\r
- });\r
- }\r
-\r
- private static void redirect(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp, GwFacade context, Locator loc, String path) throws IOException {\r
- try {\r
- if(loc.hasItems()) {\r
- Item item = loc.best();\r
- URI uri = (URI) loc.get(item);\r
- StringBuilder redirectURL = new StringBuilder(uri.toString()); \r
- redirectURL.append('/');\r
- redirectURL.append(path);\r
- String str = req.getQueryString();\r
- if(str!=null) {\r
- redirectURL.append('?');\r
- redirectURL.append(str);\r
- }\r
- trans.info().log("Redirect to",redirectURL);\r
- resp.sendRedirect(redirectURL.toString());\r
- } else {\r
- context.error(trans, resp, Result.err(Result.ERR_NotFound,"%s is not valid",req.getPathInfo()));\r
- }\r
- } catch (LocatorException e) {\r
- context.error(trans, resp, Result.err(Result.ERR_NotFound,"No DME2 Endpoints found for %s",req.getPathInfo()));\r
- }\r
- }\r
-\r
- private static class User {\r
- public final int code;\r
- public final String resp;\r
- \r
- public User(int code, String resp) {\r
- this.code = code;\r
- this.resp = resp;\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.gw.GwAPI;\r
-import org.onap.aaf.authz.gw.GwCode;\r
-import org.onap.aaf.authz.gw.facade.GwFacade;\r
-import org.onap.aaf.authz.gw.mapper.Mapper.API;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cssa.rserv.HttpMethods;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-import org.onap.aaf.cadi.Symm;\r
-\r
-/**\r
- * API Apis\r
- *\r
- */\r
-public class API_Api {\r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param gwAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(final GwAPI gwAPI, GwFacade facade) throws Exception {\r
- ////////\r
- // Overall APIs\r
- ///////\r
- gwAPI.route(HttpMethods.GET,"/api",API.VOID,new GwCode(facade,"Document API", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getAPI(trans,resp,gwAPI);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200);\r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
-\r
- }\r
- });\r
-\r
- ////////\r
- // Overall Examples\r
- ///////\r
- gwAPI.route(HttpMethods.GET,"/api/example/*",API.VOID,new GwCode(facade,"Document API", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- String pathInfo = req.getPathInfo();\r
- int question = pathInfo.lastIndexOf('?');\r
- \r
- pathInfo = pathInfo.substring(13, question<0?pathInfo.length():question);// IMPORTANT, this is size of "/api/example/"\r
- String nameOrContextType=Symm.base64noSplit.decode(pathInfo);\r
-// String param = req.getParameter("optional");\r
- Result<Void> r = context.getAPIExample(trans,resp,nameOrContextType,\r
- question>=0 && "optional=true".equalsIgnoreCase(req.getPathInfo().substring(question+1))\r
- );\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200);\r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
-\r
- }\r
- });\r
-\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.api;\r
-\r
-import java.net.URI;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.gw.GwAPI;\r
-import org.onap.aaf.authz.gw.GwCode;\r
-import org.onap.aaf.authz.gw.facade.GwFacade;\r
-import org.onap.aaf.authz.gw.mapper.Mapper.API;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cssa.rserv.HttpMethods;\r
-\r
-import org.onap.aaf.cadi.Locator;\r
-import org.onap.aaf.cadi.Locator.Item;\r
-import org.onap.aaf.cadi.LocatorException;\r
-import org.onap.aaf.cadi.dme2.DME2Locator;\r
-\r
-/**\r
- * API Apis.. using Redirect for mechanism\r
- * \r
- *\r
- */\r
-public class API_Find {\r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param gwAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(final GwAPI gwAPI, GwFacade facade) throws Exception {\r
- ////////\r
- // Overall APIs\r
- ///////\r
- gwAPI.route(HttpMethods.GET,"/dme2/:service/:version/:envContext/:routeOffer/:path*",API.VOID,new GwCode(facade,"Document API", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- //TODO cache this...\r
- try {\r
- Locator loc = new DME2Locator(gwAPI.env, gwAPI.dme2Man, \r
- pathParam(req,":service"),\r
- pathParam(req,":version"),\r
- pathParam(req,":envContext"),\r
- pathParam(req,":routeOffer")\r
- );\r
- if(loc.hasItems()) {\r
- Item item = loc.best();\r
- URI uri = (URI) loc.get(item);\r
- String redirectURL = uri.toString() + '/' + pathParam(req,":path");\r
- trans.warn().log("Redirect to",redirectURL);\r
- resp.sendRedirect(redirectURL);\r
- } else {\r
- context.error(trans, resp, Result.err(Result.ERR_NotFound,"%s is not valid",req.getPathInfo()));\r
- }\r
- } catch (LocatorException e) {\r
- context.error(trans, resp, Result.err(Result.ERR_NotFound,"No DME2 Endpoints found for %s",req.getPathInfo()));\r
- }\r
- }\r
- });\r
-\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.api;\r
-\r
-import java.net.ConnectException;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.gw.GwAPI;\r
-import org.onap.aaf.authz.gw.GwCode;\r
-import org.onap.aaf.authz.gw.facade.GwFacade;\r
-import org.onap.aaf.authz.gw.mapper.Mapper.API;\r
-import org.onap.aaf.cssa.rserv.HttpMethods;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-import org.onap.aaf.cadi.CadiException;\r
-import org.onap.aaf.cadi.client.Future;\r
-import org.onap.aaf.cadi.client.Rcli;\r
-import org.onap.aaf.cadi.client.Retryable;\r
-import org.onap.aaf.cadi.config.Config;\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-\r
-/**\r
- * API Apis.. using Redirect for mechanism\r
- * \r
- *\r
- */\r
-public class API_Proxy {\r
-\r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param gwAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(final GwAPI gwAPI, GwFacade facade) throws Exception {\r
- \r
- String aafurl = gwAPI.env.getProperty(Config.AAF_URL);\r
- if(aafurl==null) {\r
- } else {\r
-\r
- ////////\r
- // Transferring APIs\r
- ///////\r
- gwAPI.routeAll(HttpMethods.GET,"/proxy/:path*",API.VOID,new GwCode(facade,"Proxy GET", true) {\r
- @Override\r
- public void handle(final AuthzTrans trans, final HttpServletRequest req, final HttpServletResponse resp) throws Exception {\r
- TimeTaken tt = trans.start("Forward to AAF Service", Env.REMOTE);\r
- try {\r
- gwAPI.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {\r
- @Override\r
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {\r
- Future<Void> ft = client.transfer(req,resp,pathParam(req, ":path"),HttpStatus.OK_200);\r
- ft.get(10000); // Covers return codes and err messages\r
- return null;\r
- }\r
- });\r
- \r
- } catch (CadiException | APIException e) {\r
- trans.error().log(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- });\r
- \r
- gwAPI.routeAll(HttpMethods.POST,"/proxy/:path*",API.VOID,new GwCode(facade,"Proxy POST", true) {\r
- @Override\r
- public void handle(final AuthzTrans trans, final HttpServletRequest req, final HttpServletResponse resp) throws Exception {\r
- TimeTaken tt = trans.start("Forward to AAF Service", Env.REMOTE);\r
- try {\r
- gwAPI.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {\r
- @Override\r
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {\r
- Future<Void> ft = client.transfer(req,resp,pathParam(req, ":path"),HttpStatus.CREATED_201);\r
- ft.get(10000); // Covers return codes and err messages\r
- return null;\r
- }\r
- });\r
- } catch (CadiException | APIException e) {\r
- trans.error().log(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- });\r
- \r
- gwAPI.routeAll(HttpMethods.PUT,"/proxy/:path*",API.VOID,new GwCode(facade,"Proxy PUT", true) {\r
- @Override\r
- public void handle(final AuthzTrans trans, final HttpServletRequest req, final HttpServletResponse resp) throws Exception {\r
- TimeTaken tt = trans.start("Forward to AAF Service", Env.REMOTE);\r
- try {\r
- gwAPI.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {\r
- @Override\r
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {\r
- Future<Void> ft = client.transfer(req,resp,pathParam(req, ":path"),HttpStatus.OK_200);\r
- ft.get(10000); // Covers return codes and err messages\r
- return null;\r
- }\r
- });\r
- } catch (CadiException | APIException e) {\r
- trans.error().log(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- });\r
- \r
- gwAPI.routeAll(HttpMethods.DELETE,"/proxy/:path*",API.VOID,new GwCode(facade,"Proxy DELETE", true) {\r
- @Override\r
- public void handle(final AuthzTrans trans, final HttpServletRequest req, final HttpServletResponse resp) throws Exception {\r
- TimeTaken tt = trans.start("Forward to AAF Service", Env.REMOTE);\r
- try {\r
- gwAPI.clientAsUser(trans.getUserPrincipal(), new Retryable<Void>() {\r
- @Override\r
- public Void code(Rcli<?> client) throws CadiException, ConnectException, APIException {\r
- Future<Void> ft = client.transfer(req,resp,pathParam(req, ":path"),HttpStatus.OK_200);\r
- ft.get(10000); // Covers return codes and err messages\r
- return null;\r
- }\r
- });\r
- } catch (CadiException | APIException e) {\r
- trans.error().log(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- });\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.gw.GwAPI;\r
-import org.onap.aaf.authz.gw.GwCode;\r
-import org.onap.aaf.authz.gw.facade.GwFacade;\r
-import org.onap.aaf.authz.gw.mapper.Mapper.API;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cssa.rserv.HttpMethods;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-\r
-/**\r
- * API Apis\r
- *\r
- */\r
-public class API_TGuard {\r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param gwAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(final GwAPI gwAPI, GwFacade facade) throws Exception {\r
- String aftenv = gwAPI.env.getProperty(API_AAFAccess.AFT_ENVIRONMENT);\r
- if(aftenv==null) throw new Exception(API_AAFAccess.AFT_ENVIRONMENT + " must be set");\r
-\r
- ////////\r
- // Do not deploy these to PROD\r
- ///////\r
- if(API_AAFAccess.AFTUAT.equals(aftenv)) {\r
- gwAPI.route(HttpMethods.GET,"/tguard/:path*",API.VOID,new GwCode(facade,"TGuard Test", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getAPI(trans,resp,gwAPI);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200);\r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.facade;\r
-\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cssa.rserv.RServlet;\r
-\r
-\r
-/**\r
- * \r
- *\r
- */\r
-public interface GwFacade {\r
-\r
-///////////////////// STANDARD ELEMENTS //////////////////\r
- /** \r
- * @param trans\r
- * @param response\r
- * @param result\r
- */\r
- void error(AuthzTrans trans, HttpServletResponse response, Result<?> result);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param response\r
- * @param status\r
- */\r
- void error(AuthzTrans trans, HttpServletResponse response, int status, String msg, String ... detail);\r
-\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param resp\r
- * @param rservlet\r
- * @return\r
- */\r
- public Result<Void> getAPI(AuthzTrans trans, HttpServletResponse resp, RServlet<AuthzTrans> rservlet);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param resp\r
- * @param typeCode\r
- * @param optional\r
- * @return\r
- */\r
- public abstract Result<Void> getAPIExample(AuthzTrans trans, HttpServletResponse resp, String typeCode, boolean optional);\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.facade;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.gw.mapper.Mapper_1_0;\r
-import org.onap.aaf.authz.gw.service.GwServiceImpl;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Data;\r
-\r
-import gw.v1_0.Error;\r
-import gw.v1_0.InRequest;\r
-import gw.v1_0.Out;\r
-\r
-\r
-public class GwFacadeFactory {\r
- public static GwFacade_1_0 v1_0(AuthzEnv env, AuthzTrans trans, Data.TYPE type) throws APIException {\r
- return new GwFacade_1_0(env,\r
- new GwServiceImpl<\r
- InRequest,\r
- Out,\r
- Error>(trans,new Mapper_1_0()),\r
- type); \r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.facade;\r
-\r
-\r
-import static org.onap.aaf.authz.layer.Result.ERR_ActionNotCompleted;\r
-import static org.onap.aaf.authz.layer.Result.ERR_BadData;\r
-import static org.onap.aaf.authz.layer.Result.ERR_ConflictAlreadyExists;\r
-import static org.onap.aaf.authz.layer.Result.ERR_Denied;\r
-import static org.onap.aaf.authz.layer.Result.ERR_NotFound;\r
-import static org.onap.aaf.authz.layer.Result.ERR_NotImplemented;\r
-import static org.onap.aaf.authz.layer.Result.ERR_Policy;\r
-import static org.onap.aaf.authz.layer.Result.ERR_Security;\r
-\r
-import java.lang.reflect.Method;\r
-\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.gw.mapper.Mapper;\r
-import org.onap.aaf.authz.gw.mapper.Mapper.API;\r
-import org.onap.aaf.authz.gw.service.GwService;\r
-import org.onap.aaf.authz.gw.service.GwServiceImpl;\r
-import org.onap.aaf.authz.layer.FacadeImpl;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cssa.rserv.RServlet;\r
-import org.onap.aaf.cssa.rserv.RouteReport;\r
-import org.onap.aaf.cssa.rserv.doc.ApiDoc;\r
-\r
-import org.onap.aaf.cadi.aaf.client.Examples;\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Data;\r
-import org.onap.aaf.inno.env.Data.TYPE;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-import org.onap.aaf.rosetta.env.RosettaDF;\r
-\r
-import gw.v1_0.Api;\r
-\r
-/**\r
- * AuthzFacade\r
- * \r
- * This Service Facade encapsulates the essence of the API Service can do, and provides\r
- * a single created object for elements such as RosettaDF.\r
- *\r
- * The Responsibilities of this class are to:\r
- * 1) Interact with the Service Implementation (which might be supported by various kinds of Backend Storage)\r
- * 2) Validate incoming data (if applicable)\r
- * 3) Convert the Service response into the right Format, and mark the Content Type\r
- * a) In the future, we may support multiple Response Formats, aka JSON or XML, based on User Request.\r
- * 4) Log Service info, warnings and exceptions as necessary\r
- * 5) When asked by the API layer, this will create and write Error content to the OutputStream\r
- * \r
- * Note: This Class does NOT set the HTTP Status Code. That is up to the API layer, so that it can be \r
- * clearly coordinated with the API Documentation\r
- * \r
- *\r
- */\r
-public abstract class GwFacadeImpl<IN,OUT,ERROR> extends FacadeImpl implements GwFacade \r
- {\r
- private GwService<IN,OUT,ERROR> service;\r
-\r
- private final RosettaDF<ERROR> errDF;\r
- private final RosettaDF<Api> apiDF;\r
-\r
- public GwFacadeImpl(AuthzEnv env, GwService<IN,OUT,ERROR> service, Data.TYPE dataType) throws APIException {\r
- this.service = service;\r
- (errDF = env.newDataFactory(mapper().getClass(API.ERROR))).in(dataType).out(dataType);\r
- (apiDF = env.newDataFactory(Api.class)).in(dataType).out(dataType);\r
- }\r
- \r
- public Mapper<IN,OUT,ERROR> mapper() {\r
- return service.mapper();\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see com.att.authz.facade.AuthzFacade#error(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, int)\r
- * \r
- * Note: Conforms to AT&T TSS RESTful Error Structure\r
- */\r
- @Override\r
- public void error(AuthzTrans trans, HttpServletResponse response, Result<?> result) {\r
- String msg = result.details==null?"":result.details.trim();\r
- String[] detail;\r
- if(result.variables==null) {\r
- detail = new String[1];\r
- } else {\r
- int l = result.variables.length;\r
- detail=new String[l+1];\r
- System.arraycopy(result.variables, 0, detail, 1, l);\r
- }\r
- error(trans, response, result.status,msg,detail);\r
- }\r
- \r
- @Override\r
- public void error(AuthzTrans trans, HttpServletResponse response, int status, String msg, String ... _detail) {\r
- String[] detail = _detail;\r
- if(detail.length==0) {\r
- detail=new String[1];\r
- }\r
- String msgId;\r
- switch(status) {\r
- case 202:\r
- case ERR_ActionNotCompleted:\r
- msgId = "SVC1202";\r
- detail[0] = "Accepted, Action not complete";\r
- response.setStatus(/*httpstatus=*/202);\r
- break;\r
-\r
- case 403:\r
- case ERR_Policy:\r
- case ERR_Security:\r
- case ERR_Denied:\r
- msgId = "SVC1403";\r
- detail[0] = "Forbidden";\r
- response.setStatus(/*httpstatus=*/403);\r
- break;\r
- \r
- case 404:\r
- case ERR_NotFound:\r
- msgId = "SVC1404";\r
- detail[0] = "Not Found";\r
- response.setStatus(/*httpstatus=*/404);\r
- break;\r
-\r
- case 406:\r
- case ERR_BadData:\r
- msgId="SVC1406";\r
- detail[0] = "Not Acceptable";\r
- response.setStatus(/*httpstatus=*/406);\r
- break;\r
- \r
- case 409:\r
- case ERR_ConflictAlreadyExists:\r
- msgId = "SVC1409";\r
- detail[0] = "Conflict Already Exists";\r
- response.setStatus(/*httpstatus=*/409);\r
- break;\r
- \r
- case 501:\r
- case ERR_NotImplemented:\r
- msgId = "SVC1501";\r
- detail[0] = "Not Implemented"; \r
- response.setStatus(/*httpstatus=*/501);\r
- break;\r
- \r
-\r
- default:\r
- msgId = "SVC1500";\r
- detail[0] = "General Service Error";\r
- response.setStatus(/*httpstatus=*/500);\r
- break;\r
- }\r
-\r
- try {\r
- StringBuilder holder = new StringBuilder();\r
- errDF.newData(trans).load(\r
- mapper().errorFromMessage(holder,msgId,msg,detail)).to(response.getOutputStream());\r
- trans.checkpoint(\r
- "ErrResp [" + \r
- msgId +\r
- "] " +\r
- holder.toString(),\r
- Env.ALWAYS);\r
- } catch (Exception e) {\r
- trans.error().log(e,"unable to send response for",msg);\r
- }\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see com.att.authz.facade.AuthzFacade#getAPI(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse)\r
- */\r
- public final static String API_REPORT = "apiReport";\r
- @Override\r
- public Result<Void> getAPI(AuthzTrans trans, HttpServletResponse resp, RServlet<AuthzTrans> rservlet) {\r
- TimeTaken tt = trans.start(API_REPORT, Env.SUB);\r
- try {\r
- Api api = new Api();\r
- Api.Route ar;\r
- Method[] meths = GwServiceImpl.class.getDeclaredMethods();\r
- for(RouteReport rr : rservlet.routeReport()) {\r
- api.getRoute().add(ar = new Api.Route());\r
- ar.setMeth(rr.meth.name());\r
- ar.setPath(rr.path);\r
- ar.setDesc(rr.desc);\r
- ar.getContentType().addAll(rr.contextTypes);\r
- for(Method m : meths) {\r
- ApiDoc ad;\r
- if((ad = m.getAnnotation(ApiDoc.class))!=null &&\r
- rr.meth.equals(ad.method()) &&\r
- rr.path.equals(ad.path())) {\r
- for(String param : ad.params()) {\r
- ar.getParam().add(param);\r
- }\r
- for(String text : ad.text()) {\r
- ar.getComments().add(text);\r
- }\r
- ar.setExpected(ad.expectedCode());\r
- for(int ec : ad.errorCodes()) {\r
- ar.getExplicitErr().add(ec);\r
- }\r
- }\r
- }\r
- }\r
- apiDF.newData(trans).load(api).to(resp.getOutputStream());\r
- setContentType(resp,apiDF.getOutType());\r
- return Result.ok();\r
-\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,API_REPORT);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- public final static String API_EXAMPLE = "apiExample";\r
- /* (non-Javadoc)\r
- * @see com.att.authz.facade.AuthzFacade#getAPIExample(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getAPIExample(AuthzTrans trans, HttpServletResponse resp, String nameOrContentType, boolean optional) {\r
- TimeTaken tt = trans.start(API_EXAMPLE, Env.SUB);\r
- try {\r
- String content =Examples.print(apiDF.getEnv(), nameOrContentType, optional); \r
- resp.getOutputStream().print(content);\r
- setContentType(resp,content.contains("<?xml")?TYPE.XML:TYPE.JSON);\r
- return Result.ok();\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,API_EXAMPLE);\r
- return Result.err(Result.ERR_NotImplemented,e.getMessage());\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.facade;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.gw.service.GwService;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Data;\r
-\r
-import gw.v1_0.Error;\r
-import gw.v1_0.InRequest;\r
-import gw.v1_0.Out;\r
-\r
-public class GwFacade_1_0 extends GwFacadeImpl<InRequest,Out,Error>\r
-{\r
- public GwFacade_1_0(AuthzEnv env, GwService<InRequest,Out,Error> service, Data.TYPE type) throws APIException {\r
- super(env, service, type);\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.mapper;\r
-\r
-public interface Mapper<IN,OUT,ERROR>\r
-{\r
- public enum API{IN_REQ,OUT,ERROR,VOID};\r
- public Class<?> getClass(API api);\r
- public<A> A newInstance(API api);\r
-\r
- public ERROR errorFromMessage(StringBuilder holder, String msgID, String text, String... detail);\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.mapper;\r
-\r
-import org.onap.aaf.cadi.util.Vars;\r
-\r
-import gw.v1_0.Error;\r
-import gw.v1_0.InRequest;\r
-import gw.v1_0.Out;\r
-\r
-public class Mapper_1_0 implements Mapper<InRequest,Out,Error> {\r
- \r
- @Override\r
- public Class<?> getClass(API api) {\r
- switch(api) {\r
- case IN_REQ: return InRequest.class;\r
- case OUT: return Out.class;\r
- case ERROR: return Error.class;\r
- case VOID: return Void.class;\r
- }\r
- return null;\r
- }\r
-\r
- @SuppressWarnings("unchecked")\r
- @Override\r
- public <A> A newInstance(API api) {\r
- switch(api) {\r
- case IN_REQ: return (A) new InRequest();\r
- case OUT: return (A) new Out();\r
- case ERROR: return (A)new Error();\r
- case VOID: return null;\r
- }\r
- return null;\r
- }\r
-\r
- ////////////// Mapping Functions /////////////\r
- @Override\r
- public gw.v1_0.Error errorFromMessage(StringBuilder holder, String msgID, String text,String... var) {\r
- Error err = new Error();\r
- err.setMessageId(msgID);\r
- // AT&T Restful Error Format requires numbers "%" placements\r
- err.setText(Vars.convert(holder, text, var));\r
- for(String s : var) {\r
- err.getVariables().add(s);\r
- }\r
- return err;\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.service;\r
-\r
-import org.onap.aaf.authz.gw.mapper.Mapper;\r
-\r
-public interface GwService<IN,OUT,ERROR> {\r
- public Mapper<IN,OUT,ERROR> mapper();\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw.service;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.gw.mapper.Mapper;\r
-\r
-public class GwServiceImpl<IN,OUT,ERROR> \r
- implements GwService<IN,OUT,ERROR> {\r
- \r
- private Mapper<IN,OUT,ERROR> mapper;\r
- \r
- public GwServiceImpl(AuthzTrans trans, Mapper<IN,OUT,ERROR> mapper) {\r
- this.mapper = mapper;\r
- }\r
- \r
- public Mapper<IN,OUT,ERROR> mapper() {return mapper;}\r
-\r
-//////////////// APIs ///////////////////\r
-};\r
+++ /dev/null
-<!-- Used by gw (ATT 2015) -->
-<xs:schema
- xmlns:xs="http://www.w3.org/2001/XMLSchema"
- xmlns:gw="urn:gw:v1_0"
- targetNamespace="urn:gw:v1_0"
- elementFormDefault="qualified">
-
-
-<!--
- Requests
- -->
- <xs:complexType name="Request">
- <xs:sequence>
- </xs:sequence>
- </xs:complexType>
-
-<!--
- In
--->
- <xs:element name="inRequest">
- <xs:complexType>
- <xs:complexContent>
- <xs:extension base="gw:Request">
- <xs:sequence>
- <xs:element name="name" type="xs:string"/>
- <xs:element name="action" type="xs:string"/>
- </xs:sequence>
- </xs:extension>
- </xs:complexContent>
- </xs:complexType>
- </xs:element>
-
-
-<!--
- Out
--->
- <xs:element name="out">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="name" type="xs:string"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!-- **************** STANDARD ELEMENTS ******************* -->
-<!--
- Errors
- Note: This Error Structure has been made to conform to the AT&T TSS Policies
- -->
- <xs:element name="error">
- <xs:complexType>
- <xs:sequence>
- <!--
- Unique message identifier of the format ‘ABCnnnn’ where ‘ABC’ is
- either ‘SVC’ for Service Exceptions or ‘POL’ for Policy Exception.
- Exception numbers may be in the range of 0001 to 9999 where :
- * 0001 to 0199 are reserved for common exception messages
- * 0200 to 0999 are reserved for Parlay Web Services specification use
- * 1000-9999 are available for exceptions
- -->
- <xs:element name="messageId" type="xs:string" minOccurs="1" maxOccurs="1"/>
-
- <!--
- Message text, with replacement
- variables marked with %n, where n is
- an index into the list of <variables>
- elements, starting at 1
- -->
- <xs:element name="text" type="xs:string" minOccurs="1" maxOccurs="1"/>
-
- <!--
- List of zero or more strings that
- represent the contents of the variables
- used by the message text. -->
- <xs:element name="variables" type="xs:string" minOccurs="0" maxOccurs="unbounded" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-<!--
- API
--->
- <xs:element name="api">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="route" minOccurs="0" maxOccurs="unbounded">
- <xs:complexType>
- <xs:sequence>
- <xs:element name="meth" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="path" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="param" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="desc" type="xs:string" minOccurs="1" maxOccurs="1"/>
- <xs:element name="comments" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="contentType" type="xs:string" minOccurs="0" maxOccurs="unbounded"/>
- <xs:element name="expected" type="xs:int" minOccurs="1" maxOccurs="1"/>
- <xs:element name="explicitErr" type="xs:int" minOccurs="0" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-</xs:schema>
\ No newline at end of file
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.gw;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Test;\r
-\r
-public class JU_GwAPI {\r
-\r
- @Test\r
- public void test() {\r
- fail("Not yet implemented");\r
- }\r
- \r
- @Test\r
- public void testRoute() {\r
- fail("Not yet implemented");\r
- }\r
- \r
- @Test\r
- public void testRouteAll() {\r
- fail("Not yet implemented");\r
- }\r
- \r
- @Test\r
- public void testStartDME2() {\r
- fail("Not yet implemented");\r
- }\r
-\r
-}\r
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>\r
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aaf\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">\r
- <modelVersion>4.0.0</modelVersion>\r
- <parent>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>parent</artifactId>\r
- <version>1.0.1-SNAPSHOT</version>\r
- <relativePath>../pom.xml</relativePath>\r
- </parent>\r
- \r
- <artifactId>authz-service</artifactId>\r
- <name>Authz Service</name>\r
- <description>API for Authorization and Authentication</description>\r
- <url>https://github.com/att/AAF</url>\r
- \r
- <developers>\r
- <developer>\r
- <name>Jonathan Gathman</name>\r
- <email></email>\r
- <organization>ATT</organization>\r
- <organizationUrl></organizationUrl>\r
- </developer>\r
- </developers>\r
-\r
- <properties>\r
- <maven.build.timestamp.format>yyyy.MM.dd'T'hh.mm.ss'Z'</maven.build.timestamp.format>\r
- <maven.test.failure.ignore>true</maven.test.failure.ignore>\r
- <project.swmVersion>1</project.swmVersion>\r
- <project.innoVersion>1.0.0-SNAPSHOT</project.innoVersion>\r
- <project.cadiVersion>1.0.0-SNAPSHOT</project.cadiVersion>\r
- <dockerLocation>${basedir}/target/</dockerLocation>\r
- <distFilesRootDirPath>opt/app/aaf/${project.artifactId}/${project.version}</distFilesRootDirPath>\r
- <sonar.language>java</sonar.language>\r
- <sonar.skip>true</sonar.skip>\r
- <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>\r
- <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports</sonar.surefire.reportsPath>\r
- <sonar.jacoco.reportPath>${project.build.directory}/coverage-reports/jacoco.exec</sonar.jacoco.reportPath>\r
- <sonar.jacoco.itReportPath>${project.build.directory}/coverage-reports/jacoco-it.exec</sonar.jacoco.itReportPath>\r
- <sonar.jacoco.reportMissing.force.zero>true</sonar.jacoco.reportMissing.force.zero>\r
- <sonar.projectVersion>${project.version}</sonar.projectVersion>\r
- <nexusproxy>https://nexus.onap.org</nexusproxy>\r
- <docker.push.registry>localhost:5000</docker.push.registry>\r
- <snapshotNexusPath>/content/repositories/snapshots/</snapshotNexusPath>\r
- <releaseNexusPath>/content/repositories/releases/</releaseNexusPath>\r
- <stagingNexusPath>/content/repositories/staging/</stagingNexusPath>\r
- <sitePath>/content/sites/site/org/onap/aaf/authz/${project.artifactId}/${project.version}</sitePath>\r
- <skip.docker.build>true</skip.docker.build>\r
- <skip.docker.push>true</skip.docker.push>\r
- <skip.staging.artifacts>false</skip.staging.artifacts>\r
- </properties>\r
- \r
- \r
- <dependencies>\r
- <dependency>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>authz-client</artifactId>\r
- <version>${project.version}</version>\r
- </dependency>\r
- \r
- <dependency>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>authz-cmd</artifactId>\r
- <version>${project.version}</version>\r
- </dependency> \r
- <dependency>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>authz-core</artifactId>\r
- <version>${project.version}</version>\r
- <exclusions>\r
- <exclusion> \r
- <groupId>javax.servlet</groupId>\r
- <artifactId>servlet-api</artifactId>\r
- </exclusion>\r
- </exclusions> \r
- </dependency>\r
- \r
- <dependency>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>authz-cass</artifactId>\r
- <version>${project.version}</version>\r
- </dependency>\r
-\r
- <dependency>\r
- <groupId>org.onap.aaf.authz</groupId>\r
- <artifactId>authz-defOrg</artifactId>\r
- <version>${project.version}</version>\r
- </dependency>\r
-\r
-\r
- \r
- <dependency > \r
- <groupId>org.onap.aaf.inno</groupId>\r
- <artifactId>env</artifactId>\r
- <version>${project.innoVersion}</version>\r
- </dependency>\r
-\r
-\r
- <dependency>\r
- <groupId>org.onap.aaf.cadi</groupId>\r
- <artifactId>cadi-core</artifactId>\r
- <version>${project.cadiVersion}</version>\r
- </dependency>\r
-\r
- <dependency>\r
- <groupId>com.att.aft</groupId>\r
- <artifactId>dme2</artifactId>\r
- </dependency>\r
-\r
- <dependency>\r
- <groupId>org.onap.aaf.inno</groupId>\r
- <artifactId>rosetta</artifactId>\r
- <version>${project.innoVersion}</version>\r
- </dependency>\r
- <dependency>\r
- <groupId>org.onap.aaf.cadi</groupId>\r
- <artifactId>cadi-aaf</artifactId>\r
- <version>${project.cadiVersion}</version>\r
- </dependency> \r
- </dependencies>\r
-\r
-\r
- <build>\r
- <finalName>authz-service</finalName>\r
- <plugins>\r
- \r
- \r
-<plugin>\r
- <groupId>com.spotify</groupId>\r
- <artifactId>docker-maven-plugin</artifactId>\r
- <version>1.0.0</version>\r
- <configuration>\r
- <imageName>onap/aaf/authz-service</imageName>\r
- <!-- <dockerDirectory>${dockerLocation}</dockerDirectory> -->\r
- <dockerDirectory>${basedir}/src/main/resources/docker</dockerDirectory>\r
- <imageTags>\r
- <imageTag>latest</imageTag>\r
- <imageTag>${project.docker.latesttagtimestamp.version}</imageTag>\r
- <imageTag>${project.docker.latesttag.version}</imageTag>\r
- </imageTags>\r
- <forceTags>true</forceTags>\r
- <!-- <resources>\r
- <resource>\r
- <targetPath>/</targetPath>\r
- <directory>${project.build.directory}/opt</directory>\r
- <filtering>true</filtering>\r
- <includes>\r
- <include>**/**</include>\r
- </includes>\r
- </resource>\r
- </resources> --> \r
- <resources>\r
- <resource>\r
- <targetPath>/</targetPath>\r
- <directory>${project.build.directory}/opt</directory>\r
- <include>${project.build.finalName}.jar</include>\r
- </resource>\r
- <resource>\r
- <targetPath>/</targetPath>\r
- <directory>${project.build.directory}</directory>\r
- <include>**/**</include>\r
- </resource>\r
- </resources>\r
- </configuration>\r
- <executions>\r
- <execution>\r
- <id>build-image</id>\r
- <phase>package</phase>\r
- <goals>\r
- <goal>build</goal>\r
- </goals>\r
- <configuration>\r
- <skipDockerBuild>${skip.docker.build}</skipDockerBuild>\r
- </configuration>\r
- </execution> \r
-\r
- <execution>\r
- <id>tag-image-project-version</id>\r
- <phase>package</phase>\r
- <goals>\r
- <goal>tag</goal>\r
- </goals>\r
- <configuration>\r
- <image>onap/aaf/authz-service</image>\r
- <newName>${docker.push.registry}/onap/aaf/authz-service:${project.version}</newName>\r
- <skipDockerTag>${skip.docker.push}</skipDockerTag>\r
- </configuration>\r
- </execution>\r
- \r
- <execution>\r
- <id>tag-image-latest</id>\r
- <phase>package</phase>\r
- <goals>\r
- <goal>tag</goal>\r
- </goals>\r
- <configuration>\r
- <image>onap/aaf/authz-service</image>\r
- <newName>${docker.push.registry}/onap/aaf/authz-service:latest</newName>\r
- <skipDockerTag>${skip.docker.push}</skipDockerTag>\r
- </configuration>\r
- </execution>\r
- \r
- <execution>\r
- <id>push-image-latest</id>\r
- <phase>deploy</phase>\r
- <goals>\r
- <goal>push</goal>\r
- </goals>\r
- <configuration>\r
- <imageName>${docker.push.registry}/onap/aaf/authz-service:${project.version}</imageName>\r
- <skipDockerPush>${skip.docker.push}</skipDockerPush>\r
- </configuration>\r
- </execution>\r
- \r
- <execution>\r
- <id>push-image</id>\r
- <phase>deploy</phase>\r
- <goals>\r
- <goal>push</goal>\r
- </goals>\r
- <configuration>\r
- <imageName>${docker.push.registry}/onap/aaf/authz-service:latest</imageName>\r
- <skipDockerPush>${skip.docker.push}</skipDockerPush>\r
- </configuration>\r
- </execution>\r
- </executions>\r
- </plugin>\r
-\r
- <plugin>\r
- <artifactId>maven-resources-plugin</artifactId>\r
- <version>2.7</version>\r
- <executions>\r
- <execution>\r
- <id>copy-docker-file</id>\r
- <phase>package</phase>\r
- <goals>\r
- <goal>copy-resources</goal>\r
- </goals>\r
- <configuration>\r
- <outputDirectory>${dockerLocation}</outputDirectory>\r
- <overwrite>true</overwrite>\r
- <resources>\r
- <resource>\r
- <directory>${basedir}/src/main/resources/docker</directory>\r
- <filtering>true</filtering>\r
- <includes>\r
- <include>**/*</include>\r
- </includes>\r
- </resource>\r
- </resources>\r
- </configuration>\r
- </execution> \r
- <execution>\r
- <id>copy-resources-1</id>\r
- <phase>validate</phase>\r
- <goals>\r
- <goal>copy-resources</goal>\r
- </goals>\r
- <configuration>\r
- <outputDirectory>${project.build.directory}/opt/dme2reg/</outputDirectory>\r
- <resources>\r
- <resource>\r
- <directory>${project.basedir}/src/main/resources/dme2reg/</directory> \r
- <includes>\r
- <include>**/*.txt</include>\r
- </includes>\r
- </resource>\r
- </resources>\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <id>copy-resources-2</id>\r
- <phase>validate</phase>\r
- <goals>\r
- <goal>copy-resources</goal>\r
- </goals>\r
- <configuration>\r
- <outputDirectory>${project.build.directory}/opt/app/aaf/authz-service/etc</outputDirectory>\r
- <resources>\r
- <resource>\r
- <directory>${project.basedir}/src/main/resources/etc</directory>\r
- <includes>\r
- <include>**/**</include>\r
- </includes>\r
- </resource>\r
- </resources>\r
- </configuration>\r
- </execution>\r
- \r
- <execution>\r
- <id>copy-resources-3</id>\r
- <phase>validate</phase>\r
- <goals>\r
- <goal>copy-resources</goal>\r
- </goals>\r
- <configuration>\r
- <outputDirectory>${project.build.directory}/opt/app/aaf/authz-service/lib</outputDirectory>\r
- <resources>\r
- <resource>\r
- <directory>${project.basedir}/../authz-cmd/target</directory>\r
- <includes>\r
- <include>**/*.jar</include>\r
- </includes>\r
- </resource>\r
- </resources>\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <id>copy-resources-4</id>\r
- <phase>validate</phase>\r
- <goals>\r
- <goal>copy-resources</goal>\r
- </goals>\r
- <configuration>\r
- <outputDirectory>${project.build.directory}/opt/app/aaf/authz-service/</outputDirectory>\r
- <resources>\r
- <resource>\r
- <directory>${project.basedir}/../authz-cmd</directory>\r
- <includes>\r
- <include>**/aafcli.sh</include>\r
- </includes>\r
- </resource>\r
- </resources>\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <id>copy-resources-5</id>\r
- <phase>validate</phase>\r
- <goals>\r
- <goal>copy-resources</goal>\r
- </goals>\r
- <configuration>\r
- <outputDirectory>${project.build.directory}/opt/app/aaf/authz-service/etc/</outputDirectory>\r
- <resources>\r
- <resource>\r
- <directory>${project.basedir}/src/main/config</directory>\r
- <includes>\r
- <include>**/**</include>\r
- </includes>\r
- </resource>\r
- </resources>\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <id>copy-resources-6</id>\r
- <phase>validate</phase>\r
- <goals>\r
- <goal>copy-resources</goal>\r
- </goals>\r
- <configuration>\r
- <outputDirectory>${project.build.directory}/opt/app/aaf/authz-service/etc/data</outputDirectory>\r
- <resources>\r
- <resource>\r
- <directory>${project.basedir}/../opt/app/aaf/data</directory>\r
- <includes>\r
- <include>**/**</include>\r
- </includes>\r
- </resource>\r
- </resources>\r
- </configuration>\r
- </execution>\r
- </executions>\r
- </plugin>\r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-dependency-plugin</artifactId>\r
- <version>2.10</version>\r
- <executions>\r
- <execution>\r
- <id>copy-dependencies</id>\r
- <phase>package</phase>\r
- <goals>\r
- <goal>copy-dependencies</goal>\r
- </goals>\r
- <configuration>\r
- <outputDirectory>${project.build.directory}/opt/app/aaf/authz-service/lib</outputDirectory>\r
- <overWriteReleases>false</overWriteReleases>\r
- <overWriteSnapshots>false</overWriteSnapshots>\r
- <overWriteIfNewer>true</overWriteIfNewer>\r
- </configuration>\r
- </execution>\r
- </executions>\r
- </plugin>\r
- \r
- <plugin>\r
- <groupId>org.codehaus.mojo</groupId>\r
- <artifactId>exec-maven-plugin</artifactId>\r
- <version>1.5.0</version>\r
- <configuration>\r
- <executable>java</executable>\r
- <arguments>\r
- <argument>-DAFT_LATITUDE=33</argument>\r
- <argument>-DAFT_LONGITUDE=-84</argument>\r
- <argument>-DAFT_ENVIRONMENT=AFTUAT</argument>\r
- \r
- <argument>-XX:NewRatio=3</argument>\r
- <argument>-XX:+PrintGCTimeStamps</argument>\r
- <argument>-XX:+PrintGCDetails</argument>\r
- <argument>-Xloggc:gc.log</argument>\r
- <argument>-classpath</argument>\r
- \r
- <classpath>\r
- \r
- </classpath>\r
- <argument>org.onap.aaf.authz.service.AuthAPI</argument>\r
- \r
- <argument>service=org.onap.aaf.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=Dev</argument>\r
- </arguments>\r
- </configuration>\r
- </plugin>\r
- \r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-jar-plugin</artifactId>\r
- <configuration>\r
- <excludes>\r
- <exclude>*.properties</exclude>\r
- </excludes>\r
- </configuration>\r
- <version>2.3.1</version>\r
- </plugin>\r
- \r
-\r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-javadoc-plugin</artifactId>\r
- <version>2.10.4</version>\r
- <configuration>\r
- <failOnError>false</failOnError>\r
- </configuration>\r
- <executions>\r
- <execution>\r
- <id>attach-javadocs</id>\r
- <goals>\r
- <goal>jar</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin> \r
- \r
- \r
- <plugin>\r
- <groupId>org.apache.maven.plugins</groupId>\r
- <artifactId>maven-source-plugin</artifactId>\r
- <version>2.2.1</version>\r
- <executions>\r
- <execution>\r
- <id>attach-sources</id>\r
- <goals>\r
- <goal>jar-no-fork</goal>\r
- </goals>\r
- </execution>\r
- </executions>\r
- </plugin>\r
- \r
-<plugin>\r
- <groupId>org.sonatype.plugins</groupId>\r
- <artifactId>nexus-staging-maven-plugin</artifactId>\r
- <version>1.6.7</version>\r
- <extensions>true</extensions>\r
- <configuration>\r
- <nexusUrl>${nexusproxy}</nexusUrl>\r
- <stagingProfileId>176c31dfe190a</stagingProfileId>\r
- <serverId>ecomp-staging</serverId>\r
- </configuration>\r
- </plugin> \r
- <plugin>\r
- <groupId>org.jacoco</groupId>\r
- <artifactId>jacoco-maven-plugin</artifactId>\r
- <version>0.7.7.201606060606</version>\r
- <configuration>\r
- <dumpOnExit>true</dumpOnExit>\r
- <includes>\r
- <include>org.onap.aaf.*</include>\r
- </includes>\r
- </configuration>\r
- <executions>\r
- <execution>\r
- <id>pre-unit-test</id>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/coverage-reports/jacoco.exec</destFile>\r
- <!-- <append>true</append> -->\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <id>pre-integration-test</id>\r
- <phase>pre-integration-test</phase>\r
- <goals>\r
- <goal>prepare-agent</goal>\r
- </goals>\r
- <configuration>\r
- <destFile>${project.build.directory}/coverage-reports/jacoco-it.exec</destFile>\r
- <!-- <append>true</append> -->\r
- </configuration>\r
- </execution>\r
- <execution>\r
- <goals>\r
- <goal>merge</goal>\r
- </goals>\r
- <phase>post-integration-test</phase>\r
- <configuration>\r
- <fileSets>\r
- <fileSet implementation="org.apache.maven.shared.model.fileset.FileSet">\r
- <directory>${project.build.directory}/coverage-reports</directory>\r
- <includes>\r
- <include>*.exec</include>\r
- </includes>\r
- </fileSet>\r
- </fileSets>\r
- <destFile>${project.build.directory}/jacoco-dev.exec</destFile>\r
- </configuration>\r
- </execution>\r
- </executions>\r
- </plugin> \r
-\r
- \r
- </plugins>\r
-\r
- </build>\r
-\r
-\r
- <distributionManagement>\r
- <repository>\r
- <id>ecomp-releases</id>\r
- <name>AAF Release Repository</name>\r
- <url>${nexusproxy}${releaseNexusPath}</url>\r
- </repository>\r
- <snapshotRepository>\r
- <id>ecomp-snapshots</id>\r
- <name>AAF Snapshot Repository</name>\r
- <url>${nexusproxy}${snapshotNexusPath}</url>\r
- </snapshotRepository>\r
- <site>\r
- <id>ecomp-site</id>\r
- <url>dav:${nexusproxy}${sitePath}</url>\r
- </site>\r
- </distributionManagement>\r
- <profiles>\r
- <profile>\r
- <id>docker</id>\r
- <properties>\r
- <skip.staging.artifacts>true</skip.staging.artifacts>\r
- <skip.docker.build>false</skip.docker.build>\r
- <skip.docker.tag>false</skip.docker.tag>\r
- <skip.docker.push>false</skip.docker.push>\r
- </properties>\r
- </profile>\r
- </profiles>\r
-</project>\r
+++ /dev/null
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aaf\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<assembly>\r
- <id>swm</id>\r
- <formats>\r
- <format>zip</format>\r
- </formats>\r
- \r
- <baseDirectory>${artifactId}</baseDirectory>\r
- <fileSets>\r
- <fileSet>\r
- <directory>target/swm</directory>\r
- </fileSet>\r
- </fileSets>\r
-</assembly>\r
+++ /dev/null
-#-------------------------------------------------------------------------------\r
-# ============LICENSE_START====================================================\r
-# * org.onap.aaf\r
-# * ===========================================================================\r
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
-# * ===========================================================================\r
-# * Licensed under the Apache License, Version 2.0 (the "License");\r
-# * you may not use this file except in compliance with the License.\r
-# * You may obtain a copy of the License at\r
-# * \r
-# * http://www.apache.org/licenses/LICENSE-2.0\r
-# * \r
-# * Unless required by applicable law or agreed to in writing, software\r
-# * distributed under the License is distributed on an "AS IS" BASIS,\r
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
-# * See the License for the specific language governing permissions and\r
-# * limitations under the License.\r
-# * ============LICENSE_END====================================================\r
-# *\r
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
-# *\r
-#-------------------------------------------------------------------------------\r
-###############################################################################\r
-# Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.\r
-###############################################################################\r
-#\r
-# Licensed to the Apache Software Foundation (ASF) under one\r
-# or more contributor license agreements. See the NOTICE file\r
-# distributed with this work for additional information\r
-# regarding copyright ownership. The ASF licenses this file\r
-# to you under the Apache License, Version 2.0 (the\r
-# "License"); you may not use this file except in compliance\r
-# with the License. You may obtain a copy of the License at\r
-#\r
-# http://www.apache.org/licenses/LICENSE-2.0\r
-#\r
-# Unless required by applicable law or agreed to in writing,\r
-# software distributed under the License is distributed on an\r
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r
-# KIND, either express or implied. See the License for the\r
-# specific language governing permissions and limitations\r
-# under the License.\r
-#\r
-log4j.appender.INIT=org.apache.log4j.DailyRollingFileAppender \r
-log4j.appender.INIT.File=_LOG_DIR_/${LOG4J_FILENAME_init}\r
-log4j.appender.INIT.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.INIT.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.INIT.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.INIT.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.INIT.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %m %n\r
-\r
-\r
-log4j.appender.SRVR=org.apache.log4j.DailyRollingFileAppender \r
-log4j.appender.SRVR.File=logs/${LOG4J_FILENAME_authz}\r
-log4j.appender.SRVR.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.SRVR.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.SRVR.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.SRVR.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.SRVR.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %p [%c] %m %n\r
-\r
-log4j.appender.AUDIT=org.apache.log4j.DailyRollingFileAppender\r
-log4j.appender.AUDIT.File=_LOG_DIR_/${LOG4J_FILENAME_audit}\r
-log4j.appender.AUDIT.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.AUDIT.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.AUDIT.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.AUDIT.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.AUDIT.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %m %n\r
-\r
-log4j.appender.TRACE=org.apache.log4j.DailyRollingFileAppender\r
-log4j.appender.TRACE.File=logs/${LOG4J_FILENAME_trace}\r
-log4j.appender.TRACE.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.TRACE.MaxFileSize=_MAX_LOG_FILE_SIZE_\r
-#log4j.appender.TRACE.MaxBackupIndex=_MAX_LOG_FILE_BACKUP_COUNT_\r
-log4j.appender.TRACE.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.TRACE.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %m %n\r
-\r
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender\r
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] %m %n\r
-\r
-# General Apache libraries\r
-log4j.rootLogger=WARN\r
-log4j.logger.org.apache=WARN,INIT\r
-log4j.logger.dme2=WARN,INIT\r
-log4j.logger.init=INFO,INIT\r
-log4j.logger.authz=_LOG4J_LEVEL_,SRVR\r
-log4j.logger.audit=INFO,AUDIT\r
-log4j.logger.trace=TRACE,TRACE\r
-\r
-\r
-log4j.appender.SVR=org.apache.log4j.RollingFileAppender \r
-log4j.appender.SVR.File=${user.home}/.aaf/authz-cmd.log\r
-log4j.appender.SVR.MaxFileSize=10000KB\r
-log4j.appender.SVR.MaxBackupIndex=1\r
-log4j.appender.SVR.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.SVR.layout.ConversionPattern=%d %p [%c] %m %n\r
-\r
-# General Apache libraries\r
-log4j.rootLogger=WARN,SVR\r
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\r
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aaf\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<ns2:ManagedResourceList xmlns:ns2="http://scld.att.com/lrm/util" xmlns="http://scld.att.com/lrm/commontypes" xmlns:ns3="http://scld.att.com/lrm/types">\r
- <ns2:ManagedResource>\r
- <ResourceDescriptor>\r
- <ResourceName>com.att.authz._ARTIFACT_ID_</ResourceName>\r
- <ResourceVersion>\r
- <Major>_MAJOR_VER_</Major>\r
- <Minor>_MINOR_VER_</Minor>\r
- <Patch>_PATCH_VER_</Patch> \r
- </ResourceVersion>\r
- <RouteOffer>_ROUTE_OFFER_</RouteOffer>\r
- </ResourceDescriptor>\r
- <ResourceType>Java</ResourceType>\r
- <ResourcePath>com.att.authz.service.AuthzAPI</ResourcePath>\r
- <ResourceProps>\r
- <Tag>process.workdir</Tag>\r
- <Value>_ROOT_DIR_</Value>\r
- </ResourceProps> \r
- <ResourceProps>\r
- <Tag>jvm.version</Tag>\r
- <Value>1.8</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.args</Tag>\r
- <Value>-DAFT_LATITUDE=_AFT_LATITUDE_ -DAFT_LONGITUDE=_AFT_LONGITUDE_ -DAFT_ENVIRONMENT=_AFT_ENVIRONMENT_ -Dplatform=_SCLD_PLATFORM_ -Dcom.sun.jndi.ldap.connect.pool.maxsize=20 -Dcom.sun.jndi.ldap.connect.pool.prefsize=10 -Dcom.sun.jndi.ldap.connect.pool.timeout=3000 </Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.classpath</Tag>\r
- <Value>_ROOT_DIR_/etc:_ROOT_DIR_/lib/*:</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.heap.min</Tag>\r
- <Value>1024m</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>jvm.heap.max</Tag>\r
- <Value>2048m</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>start.class</Tag>\r
- <Value>com.att.authz.service.AuthAPI</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>stdout.redirect</Tag>\r
- <Value>_ROOT_DIR_/logs/SystemOut.log</Value>\r
- </ResourceProps>\r
- <ResourceProps>\r
- <Tag>stderr.redirect</Tag>\r
- <Value>_ROOT_DIR_/logs/SystemErr.log</Value>\r
- </ResourceProps>\r
- <ResourceOSID>aft</ResourceOSID>\r
- <ResourceStartType>AUTO</ResourceStartType>\r
- <ResourceStartPriority>2</ResourceStartPriority>\r
- <ResourceMinCount>_RESOURCE_MIN_COUNT_</ResourceMinCount>\r
- <ResourceMaxCount>_RESOURCE_MAX_COUNT_</ResourceMaxCount> \r
- <ResourceRegistration>_RESOURCE_REGISTRATION_</ResourceRegistration>\r
- <ResourceSWMComponent>com.att.authz:_ARTIFACT_ID_</ResourceSWMComponent>\r
- <ResourceSWMComponentVersion>_ARTIFACT_VERSION_</ResourceSWMComponentVersion>\r
- </ns2:ManagedResource>\r
-</ns2:ManagedResourceList>\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.cadi;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-\r
-import java.security.Principal;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO.Data;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-\r
-import org.onap.aaf.cadi.Lur;\r
-import org.onap.aaf.cadi.Permission;\r
-\r
-public class DirectAAFLur implements Lur {\r
- private final AuthzEnv env;\r
- private final Question question;\r
- \r
- public DirectAAFLur(AuthzEnv env, Question question) {\r
- this.env = env;\r
- this.question = question;\r
- }\r
-\r
- @Override\r
- public boolean fish(Principal bait, Permission pond) {\r
- return fish(env.newTransNoAvg(),bait,pond);\r
- }\r
- \r
- public boolean fish(AuthzTrans trans, Principal bait, Permission pond) {\r
- Result<List<Data>> pdr = question.getPermsByUser(trans, bait.getName(),false);\r
- switch(pdr.status) {\r
- case OK:\r
- for(PermDAO.Data d : pdr.value) {\r
- if(new PermPermission(d).match(pond)) return true;\r
- }\r
- break;\r
- default:\r
- trans.error().log("Can't access Cassandra to fulfill Permission Query: ",pdr.status,"-",pdr.details);\r
- }\r
- return false;\r
- }\r
-\r
- @Override\r
- public void fishAll(Principal bait, List<Permission> permissions) {\r
- Result<List<Data>> pdr = question.getPermsByUser(env.newTrans(), bait.getName(),false);\r
- switch(pdr.status) {\r
- case OK:\r
- for(PermDAO.Data d : pdr.value) {\r
- permissions.add(new PermPermission(d));\r
- }\r
- break;\r
- default:\r
- env.error().log("Can't access Cassandra to fulfill Permission Query: ",pdr.status,"-", pdr.details);\r
- }\r
- }\r
- \r
- @Override\r
- public void destroy() {\r
- }\r
-\r
- @Override\r
- public boolean handlesExclusively(Permission pond) {\r
- return false;\r
- }\r
- \r
- /**\r
- * Small Class implementing CADI's Permission with Cassandra Data\r
- *\r
- */\r
- public static class PermPermission implements Permission {\r
- private PermDAO.Data data;\r
- \r
- public PermPermission(PermDAO.Data d) {\r
- data = d;\r
- }\r
- \r
- public PermPermission(AuthzTrans trans, Question q, String p) {\r
- data = PermDAO.Data.create(trans, q, p);\r
- }\r
- \r
- public PermPermission(String ns, String type, String instance, String action) {\r
- data = new PermDAO.Data();\r
- data.ns = ns;\r
- data.type = type;\r
- data.instance = instance;\r
- data.action = action;\r
- }\r
-\r
- @Override\r
- public String getKey() {\r
- return data.type;\r
- }\r
-\r
- @Override\r
- public boolean match(Permission p) {\r
- if(p==null)return false;\r
- PermDAO.Data pd;\r
- if(p instanceof DirectAAFLur.PermPermission) {\r
- pd = ((DirectAAFLur.PermPermission)p).data;\r
- if(data.ns.equals(pd.ns))\r
- if(data.type.equals(pd.type))\r
- if(data.instance!=null && (data.instance.equals(pd.instance) || "*".equals(data.instance)))\r
- if(data.action!=null && (data.action.equals(pd.action) || "*".equals(data.action)))\r
- return true;\r
- } else{\r
- String[] lp = p.getKey().split("\\|");\r
- if(lp.length<3)return false;\r
- if(data.fullType().equals(lp[0]))\r
- if(data.instance!=null && (data.instance.equals(lp[1]) || "*".equals(data.instance)))\r
- if(data.action!=null && (data.action.equals(lp[2]) || "*".equals(data.action)))\r
- return true;\r
- }\r
- return false;\r
- }\r
-\r
- @Override\r
- public String permType() {\r
- return "AAFLUR";\r
- }\r
- \r
- }\r
- \r
- public String toString() {\r
- return "DirectAAFLur is enabled";\r
- \r
- }\r
-\r
- @Override\r
- public boolean supports(String userName) {\r
- //TODO\r
- return true;\r
- }\r
-\r
- @Override\r
- public Permission createPerm(String p) {\r
- // TODO Auto-generated method stub\r
- return null;\r
- }\r
-\r
- @Override\r
- public void clear(Principal p, StringBuilder report) {\r
- // TODO Auto-generated method stub\r
- \r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.cadi;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-\r
-import java.util.Date;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-\r
-import org.onap.aaf.cadi.CredVal;\r
-\r
-/**\r
- * DirectAAFUserPass is intended to provide password Validation directly from Cassandra Database, and is only\r
- * intended for use in AAF itself. The normal "AAF Taf" objects are, of course, clients.\r
- * \r
- *\r
- */\r
-public class DirectAAFUserPass implements CredVal {\r
- private final AuthzEnv env;\r
- private final Question question;\r
- \r
- public DirectAAFUserPass(AuthzEnv env, Question question, String appPass) {\r
- this.env = env;\r
- this.question = question;\r
- }\r
- \r
- @Override\r
- public boolean validate(String user, Type type, byte[] pass) {\r
- try {\r
- AuthzTrans trans = env.newTransNoAvg();\r
- Result<Date> result = question.doesUserCredMatch(trans, user, pass);\r
- trans.logAuditTrail(env.info());\r
- switch(result.status) {\r
- case OK:\r
- return true;\r
- default:\r
- \r
- env.warn().log(user, "failed Password Validation:",result.errorString());\r
- }\r
- } catch (DAOException e) {\r
- System.out.println(" exception in DirectAAFUserPass class ");\r
- e.printStackTrace();\r
- env.error().log(e,"Cannot validate User/Pass from Cassandra");\r
- }\r
- return false;\r
- }\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.cadi;\r
-\r
-import java.nio.ByteBuffer;\r
-import java.security.Principal;\r
-import java.security.cert.CertificateException;\r
-import java.security.cert.X509Certificate;\r
-import java.util.List;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cssa.rserv.TransFilter;\r
-import org.onap.aaf.dao.aaf.cached.CachedCertDAO;\r
-import org.onap.aaf.dao.aaf.cass.CertDAO.Data;\r
-\r
-import org.onap.aaf.cadi.principal.X509Principal;\r
-import org.onap.aaf.cadi.taf.cert.CertIdentity;\r
-import org.onap.aaf.cadi.taf.cert.X509Taf;\r
-\r
-/**\r
- * Direct view of CertIdentities\r
- * \r
- * Warning: this class is difficult to instantiate. The only service that can use it is AAF itself, and is thus \r
- * entered in the "init" after the CachedCertDAO is created.\r
- * \r
- *\r
- */\r
-public class DirectCertIdentity implements CertIdentity {\r
- private static CachedCertDAO certDAO;\r
-\r
- @Override\r
- public Principal identity(HttpServletRequest req, X509Certificate cert, byte[] _certBytes) throws CertificateException {\r
- byte[] certBytes = _certBytes;\r
- if(cert==null && certBytes==null) {\r
- return null;\r
- }\r
- if(certBytes==null) {\r
- certBytes = cert.getEncoded();\r
- }\r
- byte[] fingerprint = X509Taf.getFingerPrint(certBytes);\r
-\r
- AuthzTrans trans = (AuthzTrans) req.getAttribute(TransFilter.TRANS_TAG);\r
- \r
- Result<List<Data>> cresp = certDAO.read(trans, ByteBuffer.wrap(fingerprint));\r
- if(cresp.isOKhasData()) {\r
- Data cdata = cresp.value.get(0);\r
- return new X509Principal(cdata.id,cert,certBytes);\r
- }\r
- return null;\r
- }\r
-\r
- public static void set(CachedCertDAO ccd) {\r
- certDAO = ccd;\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.facade;\r
-\r
-import java.util.Date;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.cssa.rserv.RServlet;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-\r
-/**\r
- * AuthzFacade\r
- * This layer is responsible for covering the Incoming Messages, be they XML, JSON or just entries on the URL,\r
- * and converting them to data that can be called on the Service Layer.\r
- * \r
- * Upon response, this layer, because it knew the incoming Data Formats (i.e. XML/JSON), the HTTP call types\r
- * are set on "ContentType" on Response.\r
- * \r
- * Finally, we wrap the call in Time Stamps with explanation of what is happing for Audit trails.\r
- * \r
- *\r
- */\r
-public interface AuthzFacade {\r
- public static final int PERM_DEPEND_424 = -1000;\r
- public static final int ROLE_DEPEND_424 = -1001;\r
-\r
- /*\r
- * Namespaces\r
- */\r
- public abstract Result<Void> requestNS(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp, NsType type);\r
- \r
- public abstract Result<Void> getNSsByName(AuthzTrans trans, HttpServletResponse resp, String ns);\r
- \r
- public abstract Result<Void> getNSsByAdmin(AuthzTrans trans, HttpServletResponse resp, String user, boolean full);\r
- \r
- public abstract Result<Void> getNSsByResponsible(AuthzTrans trans, HttpServletResponse resp, String user, boolean full);\r
- \r
- public abstract Result<Void> getNSsByEither(AuthzTrans trans, HttpServletResponse resp, String user, boolean full);\r
-\r
- public abstract Result<Void> getNSsChildren(AuthzTrans trans, HttpServletResponse resp, String pathParam);\r
-\r
- public abstract Result<Void> addAdminToNS(AuthzTrans trans, HttpServletResponse resp, String ns, String id);\r
-\r
- public abstract Result<Void> delAdminFromNS(AuthzTrans trans, HttpServletResponse resp, String ns, String id);\r
-\r
- public abstract Result<Void> addResponsibilityForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String id);\r
-\r
- public abstract Result<Void> delResponsibilityForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String id);\r
- \r
- public abstract Result<Void> updateNsDescription(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
-\r
- public abstract Result<Void> deleteNS(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp, String ns);\r
-\r
- // NS Attribs\r
- public abstract Result<Void> createAttribForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String key, String value);\r
-\r
- public abstract Result<Void> readNsByAttrib(AuthzTrans trans, HttpServletResponse resp, String key);\r
-\r
- public abstract Result<Void> updAttribForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String key, String value);\r
-\r
- public abstract Result<Void> delAttribForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String key);\r
-\r
- /*\r
- * Permissions\r
- */\r
- public abstract Result<Void> createPerm(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp); \r
- \r
- public abstract Result<Void> getPermsByName(AuthzTrans trans, HttpServletResponse resp, \r
- String type, String instance, String action);\r
-\r
- public abstract Result<Void> getPermsByUser(AuthzTrans trans, HttpServletResponse response, String user);\r
- \r
- public abstract Result<Void> getPermsByUserWithAAFQuery(AuthzTrans trans, HttpServletRequest request, HttpServletResponse response, String user);\r
-\r
- public abstract Result<Void> getPermsByType(AuthzTrans trans, HttpServletResponse resp, String type);\r
-\r
- public abstract Result<Void> getPermsForRole(AuthzTrans trans, HttpServletResponse response, String roleName);\r
-\r
- public abstract Result<Void> getPermsByNS(AuthzTrans trans, HttpServletResponse response, String ns);\r
- \r
- public abstract Result<Void> renamePerm(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp,\r
- String type, String instance, String action);\r
- \r
- public abstract Result<Void> updatePermDescription(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
- \r
- public abstract Result<Void> resetPermRoles(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
-\r
- public abstract Result<Void> deletePerm(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
-\r
- public abstract Result<Void> deletePerm(AuthzTrans trans, HttpServletResponse resp, \r
- String perm, String type, String action);\r
-\r
- /*\r
- * Roles\r
- */\r
- public abstract Result<Void> createRole(AuthzTrans trans, HttpServletRequest req, HttpServletResponse response);\r
- \r
- public abstract Result<Void> getRolesByName(AuthzTrans trans,HttpServletResponse resp, String name);\r
-\r
- public abstract Result<Void> getRolesByNS(AuthzTrans trans, HttpServletResponse resp, String ns);\r
-\r
- public abstract Result<Void> getRolesByNameOnly(AuthzTrans trans, HttpServletResponse resp, String nameOnly);\r
-\r
- public abstract Result<Void> getRolesByUser(AuthzTrans trans, HttpServletResponse resp, String user);\r
-\r
- public abstract Result<Void> getRolesByPerm(AuthzTrans trans, HttpServletResponse resp, String type, String instance, String action);\r
-\r
- public abstract Result<Void> updateRoleDescription(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
- \r
- public abstract Result<Void> addPermToRole(AuthzTrans trans,HttpServletRequest req, HttpServletResponse resp);\r
- \r
- public abstract Result<Void> delPermFromRole(AuthzTrans trans,HttpServletRequest req, HttpServletResponse resp);\r
-\r
- public abstract Result<Void> deleteRole(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
-\r
- public abstract Result<Void> deleteRole(AuthzTrans trans, HttpServletResponse resp, String role);\r
-\r
- /*\r
- * Users\r
- */\r
- \r
- public abstract Result<Void> getUsersByRole(AuthzTrans trans, HttpServletResponse resp, String role);\r
- \r
- public abstract Result<Void> getUsersByPermission(AuthzTrans trans, HttpServletResponse resp, \r
- String type, String instance, String action);\r
-\r
-\r
-\r
- /*\r
- * Delegates\r
- */\r
- public abstract Result<Void> createDelegate(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
- \r
- public abstract Result<Void> updateDelegate(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
- \r
- public abstract Result<Void> deleteDelegate(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
- \r
- public abstract Result<Void> deleteDelegate(AuthzTrans trans, String user);\r
- \r
- public abstract Result<Void> getDelegatesByUser(AuthzTrans trans, String userName, HttpServletResponse resp);\r
-\r
- public abstract Result<Void> getDelegatesByDelegate(AuthzTrans trans, String userName, HttpServletResponse resp);\r
-\r
- /*\r
- * Credentials\r
- */\r
- public abstract Result<Void> createUserCred(AuthzTrans trans, HttpServletRequest req);\r
-\r
- public abstract Result<Void> changeUserCred(AuthzTrans trans, HttpServletRequest req);\r
-\r
- public abstract Result<Void> extendUserCred(AuthzTrans trans, HttpServletRequest req, String days);\r
-\r
- public abstract Result<Void> getCredsByNS(AuthzTrans trans, HttpServletResponse resp, String ns);\r
-\r
- public abstract Result<Void> getCredsByID(AuthzTrans trans, HttpServletResponse resp, String id);\r
-\r
- public abstract Result<Void> deleteUserCred(AuthzTrans trans, HttpServletRequest req);\r
-\r
- public abstract Result<Void> validBasicAuth(AuthzTrans trans, HttpServletResponse resp, String basicAuth);\r
-\r
- public abstract Result<Date> doesCredentialMatch(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
-\r
- /*\r
- * Miscellaneous\r
- */\r
- /**\r
- * Place Standard Messages based on HTTP Code onto Error Data Structure, and write to OutputStream\r
- * Log message\r
- */\r
- public abstract void error(AuthzTrans trans, HttpServletResponse response, Result<?> result);\r
-\r
- /*\r
- * UserRole\r
- */\r
- public abstract Result<Void> requestUserRole(AuthzTrans trans,HttpServletRequest req, HttpServletResponse resp);\r
- \r
- public abstract Result<Void> getUserInRole(AuthzTrans trans, HttpServletResponse resp, String user, String role);\r
- \r
- public abstract Result<Void> getUserRolesByRole(AuthzTrans trans, HttpServletResponse resp, String role);\r
- \r
- public abstract Result<Void> getUserRolesByUser(AuthzTrans trans, HttpServletResponse resp, String user);\r
-\r
- public abstract Result<Void> deleteUserRole(AuthzTrans trans, HttpServletResponse resp, String user, String role);\r
- \r
- public abstract Result<Void> resetUsersForRole(AuthzTrans trans, HttpServletResponse resp, HttpServletRequest req);\r
-\r
- public abstract Result<Void> resetRolesForUser(AuthzTrans trans, HttpServletResponse resp, HttpServletRequest req);\r
- \r
- public abstract Result<Void> extendUserRoleExpiration(AuthzTrans trans, HttpServletResponse resp, String user,\r
- String role);\r
-\r
- /*\r
- * Approval \r
- */\r
- public abstract Result<Void> updateApproval(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp);\r
- \r
- public abstract Result<Void> getApprovalsByUser(AuthzTrans trans, HttpServletResponse resp, String user);\r
- \r
- public abstract Result<Void> getApprovalsByTicket(AuthzTrans trans, HttpServletResponse resp, String ticket);\r
- \r
- public abstract Result<Void> getApprovalsByApprover(AuthzTrans trans, HttpServletResponse resp, String approver);\r
-\r
-\r
- /*\r
- * History\r
- */\r
- public abstract Result<Void> getHistoryByUser(AuthzTrans trans, HttpServletResponse resp, String user, int[] yyyymm, final int sort);\r
- \r
- public abstract Result<Void> getHistoryByRole(AuthzTrans trans, HttpServletResponse resp, String subject, int[] yyyymm, final int sort);\r
-\r
- public abstract Result<Void> getHistoryByPerm(AuthzTrans trans, HttpServletResponse resp, String subject, int[] yyyymm, final int sort);\r
-\r
- public abstract Result<Void> getHistoryByNS(AuthzTrans trans, HttpServletResponse resp, String subject, int[] yyyymm, final int sort);\r
-\r
- /*\r
- * Cache \r
- */\r
- public abstract Result<Void> cacheClear(AuthzTrans trans, String pathParam);\r
-\r
- public abstract Result<Void> cacheClear(AuthzTrans trans, String string,String segments);\r
- \r
- public abstract void dbReset(AuthzTrans trans);\r
-\r
-\r
-\r
- /*\r
- * API\r
- */\r
- public Result<Void> getAPI(AuthzTrans trans, HttpServletResponse resp, RServlet<AuthzTrans> rservlet);\r
-\r
- public abstract Result<Void> getAPIExample(AuthzTrans trans, HttpServletResponse resp, String typeCode, boolean optional);\r
-\r
- public abstract Result<Void> getCertInfoByID(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp, String id);\r
-\r
-\r
-\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.facade;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.service.AuthzCassServiceImpl;\r
-import org.onap.aaf.authz.service.mapper.Mapper_2_0;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Data;\r
-\r
-\r
-public class AuthzFacadeFactory {\r
- public static AuthzFacade_2_0 v2_0(AuthzEnv env, AuthzTrans trans, Data.TYPE type, Question question) throws APIException {\r
- return new AuthzFacade_2_0(env,\r
- new AuthzCassServiceImpl<\r
- aaf.v2_0.Nss,\r
- aaf.v2_0.Perms,\r
- aaf.v2_0.Pkey,\r
- aaf.v2_0.Roles,\r
- aaf.v2_0.Users,\r
- aaf.v2_0.UserRoles,\r
- aaf.v2_0.Delgs,\r
- aaf.v2_0.Certs,\r
- aaf.v2_0.Keys,\r
- aaf.v2_0.Request,\r
- aaf.v2_0.History,\r
- aaf.v2_0.Error,\r
- aaf.v2_0.Approvals>\r
- (trans,new Mapper_2_0(question),question),\r
- type);\r
- }\r
- \r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.facade;\r
-\r
-import static org.onap.aaf.authz.layer.Result.ERR_ActionNotCompleted;\r
-import static org.onap.aaf.authz.layer.Result.ERR_Backend;\r
-import static org.onap.aaf.authz.layer.Result.ERR_BadData;\r
-import static org.onap.aaf.authz.layer.Result.ERR_ConflictAlreadyExists;\r
-import static org.onap.aaf.authz.layer.Result.ERR_Denied;\r
-import static org.onap.aaf.authz.layer.Result.ERR_NotFound;\r
-import static org.onap.aaf.authz.layer.Result.ERR_NotImplemented;\r
-import static org.onap.aaf.authz.layer.Result.ERR_Policy;\r
-import static org.onap.aaf.authz.layer.Result.ERR_Security;\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_ChoiceNeeded;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_DelegateNotFound;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_DependencyExists;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_FutureNotRequested;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_InvalidDelegate;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_NsNotFound;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_PermissionNotFound;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_RoleNotFound;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_UserNotFound;\r
-import static org.onap.aaf.dao.aaf.cass.Status.ERR_UserRoleNotFound;\r
-\r
-import java.io.IOException;\r
-import java.lang.reflect.Method;\r
-import java.util.Date;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.FacadeImpl;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthzCassServiceImpl;\r
-import org.onap.aaf.authz.service.AuthzService;\r
-import org.onap.aaf.authz.service.mapper.Mapper;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-import org.onap.aaf.cssa.rserv.RServlet;\r
-import org.onap.aaf.cssa.rserv.RouteReport;\r
-import org.onap.aaf.cssa.rserv.doc.ApiDoc;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-\r
-import org.onap.aaf.cadi.aaf.client.Examples;\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Data;\r
-import org.onap.aaf.inno.env.Data.TYPE;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-import org.onap.aaf.inno.env.util.Chrono;\r
-import org.onap.aaf.rosetta.Marshal;\r
-import org.onap.aaf.rosetta.env.RosettaDF;\r
-import org.onap.aaf.rosetta.env.RosettaData;\r
-\r
-import aaf.v2_0.Api;\r
-\r
-/**\r
- * AuthzFacade\r
- * \r
- * This Service Facade encapsulates the essence of the API Service can do, and provides\r
- * a single created object for elements such as RosettaDF.\r
- *\r
- * The Responsibilities of this class are to:\r
- * 1) Interact with the Service Implementation (which might be supported by various kinds of Backend Storage)\r
- * 2) Validate incoming data (if applicable)\r
- * 3) Convert the Service response into the right Format, and mark the Content Type\r
- * a) In the future, we may support multiple Response Formats, aka JSON or XML, based on User Request.\r
- * 4) Log Service info, warnings and exceptions as necessary\r
- * 5) When asked by the API layer, this will create and write Error content to the OutputStream\r
- * \r
- * Note: This Class does NOT set the HTTP Status Code. That is up to the API layer, so that it can be \r
- * clearly coordinated with the API Documentation\r
- * \r
- *\r
- */\r
-public abstract class AuthzFacadeImpl<NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> extends FacadeImpl implements AuthzFacade \r
- {\r
- private static final String FORBIDDEN = "Forbidden";\r
- private static final String NOT_FOUND = "Not Found";\r
- private static final String NOT_ACCEPTABLE = "Not Acceptable";\r
- private static final String GENERAL_SERVICE_ERROR = "General Service Error";\r
- private static final String NO_DATA = "***No Data***";\r
- private AuthzService<NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> service = null;\r
- private final RosettaDF<NSS> nssDF;\r
- private final RosettaDF<PERMS> permsDF;\r
- private final RosettaDF<ROLES> roleDF;\r
- private final RosettaDF<USERS> usersDF;\r
- private final RosettaDF<USERROLES> userrolesDF;\r
- private final RosettaDF<CERTS> certsDF;\r
- private final RosettaDF<DELGS> delgDF;\r
- private final RosettaDF<REQUEST> permRequestDF;\r
- private final RosettaDF<REQUEST> roleRequestDF;\r
- private final RosettaDF<REQUEST> userRoleRequestDF;\r
- private final RosettaDF<REQUEST> rolePermRequestDF;\r
- private final RosettaDF<REQUEST> nsRequestDF;\r
- private final RosettaDF<REQUEST> credRequestDF;\r
- private final RosettaDF<REQUEST> delgRequestDF;\r
- private final RosettaDF<HISTORY> historyDF;\r
- private final RosettaDF<KEYS> keysDF;\r
-\r
- private final RosettaDF<ERR> errDF;\r
- private final RosettaDF<APPROVALS> approvalDF;\r
- // Note: Api is not different per Version\r
- private final RosettaDF<Api> apiDF;\r
-\r
-\r
- @SuppressWarnings("unchecked")\r
- public AuthzFacadeImpl(AuthzEnv env, AuthzService<NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> service, Data.TYPE dataType) throws APIException {\r
- this.service = service;\r
- (nssDF = env.newDataFactory(service.mapper().getClass(API.NSS))).in(dataType).out(dataType);\r
- (permRequestDF = env.newDataFactory(service.mapper().getClass(API.PERM_REQ))).in(dataType).out(dataType);\r
- (permsDF = env.newDataFactory(service.mapper().getClass(API.PERMS))).in(dataType).out(dataType);\r
-// (permKeyDF = env.newDataFactory(service.mapper().getClass(API.PERM_KEY))).in(dataType).out(dataType);\r
- (roleDF = env.newDataFactory(service.mapper().getClass(API.ROLES))).in(dataType).out(dataType);\r
- (roleRequestDF = env.newDataFactory(service.mapper().getClass(API.ROLE_REQ))).in(dataType).out(dataType);\r
- (usersDF = env.newDataFactory(service.mapper().getClass(API.USERS))).in(dataType).out(dataType);\r
- (userrolesDF = env.newDataFactory(service.mapper().getClass(API.USER_ROLES))).in(dataType).out(dataType);\r
- (certsDF = env.newDataFactory(service.mapper().getClass(API.CERTS))).in(dataType).out(dataType)\r
- .rootMarshal((Marshal<CERTS>) service.mapper().getMarshal(API.CERTS));\r
- ;\r
- (userRoleRequestDF = env.newDataFactory(service.mapper().getClass(API.USER_ROLE_REQ))).in(dataType).out(dataType);\r
- (rolePermRequestDF = env.newDataFactory(service.mapper().getClass(API.ROLE_PERM_REQ))).in(dataType).out(dataType);\r
- (nsRequestDF = env.newDataFactory(service.mapper().getClass(API.NS_REQ))).in(dataType).out(dataType);\r
- (credRequestDF = env.newDataFactory(service.mapper().getClass(API.CRED_REQ))).in(dataType).out(dataType);\r
- (delgRequestDF = env.newDataFactory(service.mapper().getClass(API.DELG_REQ))).in(dataType).out(dataType);\r
- (historyDF = env.newDataFactory(service.mapper().getClass(API.HISTORY))).in(dataType).out(dataType);\r
- ( keysDF = env.newDataFactory(service.mapper().getClass(API.KEYS))).in(dataType).out(dataType);\r
- (delgDF = env.newDataFactory(service.mapper().getClass(API.DELGS))).in(dataType).out(dataType);\r
- (approvalDF = env.newDataFactory(service.mapper().getClass(API.APPROVALS))).in(dataType).out(dataType);\r
- (errDF = env.newDataFactory(service.mapper().getClass(API.ERROR))).in(dataType).out(dataType);\r
- (apiDF = env.newDataFactory(Api.class)).in(dataType).out(dataType);\r
- }\r
- \r
- public Mapper<NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> mapper() {\r
- return service.mapper();\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#error(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, int)\r
- * \r
- * Note: Conforms to AT&T TSS RESTful Error Structure\r
- */\r
- @Override\r
- public void error(AuthzTrans trans, HttpServletResponse response, Result<?> result) {\r
- String msg = result.details==null?"%s":"%s - " + result.details.trim();\r
- String msgId;\r
- String[] detail;\r
- if(result.variables==null) {\r
- detail = new String[1];\r
- } else {\r
- int l = result.variables.length;\r
- detail=new String[l+1];\r
- System.arraycopy(result.variables, 0, detail, 1, l);\r
- }\r
- //int httpstatus;\r
- \r
- switch(result.status) {\r
- case ERR_ActionNotCompleted:\r
- msgId = "SVC1202";\r
- detail[0] = "Accepted, Action not complete";\r
- response.setStatus(/*httpstatus=*/202);\r
- break;\r
-\r
- case ERR_Policy:\r
- msgId = "SVC3403";\r
- detail[0] = FORBIDDEN;\r
- response.setStatus(/*httpstatus=*/403);\r
- break;\r
- case ERR_Security:\r
- msgId = "SVC2403";\r
- detail[0] = FORBIDDEN;\r
- response.setStatus(/*httpstatus=*/403);\r
- break;\r
- case ERR_Denied:\r
- msgId = "SVC1403";\r
- detail[0] = FORBIDDEN;\r
- response.setStatus(/*httpstatus=*/403);\r
- break;\r
- // This is still forbidden to directly impact, but can be Requested when passed\r
- // with "request=true" query Param\r
- case ERR_FutureNotRequested:\r
- msgId = "SVC2403";\r
- detail[0] = msg;\r
- response.setStatus(/*httpstatus=*/403);\r
- break;\r
- \r
- case ERR_NsNotFound:\r
- msgId = "SVC2404";\r
- detail[0] = NOT_FOUND;\r
- response.setStatus(/*httpstatus=*/404);\r
- break;\r
- case ERR_RoleNotFound:\r
- msgId = "SVC3404";\r
- detail[0] = NOT_FOUND;\r
- response.setStatus(/*httpstatus=*/404);\r
- break;\r
- case ERR_PermissionNotFound:\r
- msgId = "SVC4404";\r
- detail[0] = NOT_FOUND;\r
- response.setStatus(/*httpstatus=*/404);\r
- break;\r
- case ERR_UserNotFound:\r
- msgId = "SVC5404";\r
- detail[0] = NOT_FOUND;\r
- response.setStatus(/*httpstatus=*/404);\r
- break;\r
- case ERR_UserRoleNotFound:\r
- msgId = "SVC6404";\r
- detail[0] = NOT_FOUND;\r
- response.setStatus(/*httpstatus=*/404);\r
- break;\r
- case ERR_DelegateNotFound:\r
- msgId = "SVC7404";\r
- detail[0] = NOT_FOUND;\r
- response.setStatus(/*httpstatus=*/404);\r
- break;\r
- case ERR_NotFound:\r
- msgId = "SVC1404";\r
- detail[0] = NOT_FOUND;\r
- response.setStatus(/*httpstatus=*/404);\r
- break;\r
-\r
- case ERR_InvalidDelegate:\r
- msgId="SVC2406";\r
- detail[0] = NOT_ACCEPTABLE;\r
- response.setStatus(/*httpstatus=*/406);\r
- break;\r
- case ERR_BadData:\r
- msgId="SVC1406";\r
- detail[0] = NOT_ACCEPTABLE;\r
- response.setStatus(/*httpstatus=*/406);\r
- break;\r
- \r
- case ERR_ConflictAlreadyExists:\r
- msgId = "SVC1409";\r
- detail[0] = "Conflict Already Exists";\r
- response.setStatus(/*httpstatus=*/409);\r
- break;\r
- \r
- case ERR_DependencyExists:\r
- msgId = "SVC1424";\r
- detail[0] = "Failed Dependency";\r
- response.setStatus(/*httpstatus=*/424);\r
- break;\r
- \r
- case ERR_NotImplemented:\r
- msgId = "SVC1501";\r
- detail[0] = "Not Implemented"; \r
- response.setStatus(/*httpstatus=*/501);\r
- break;\r
- \r
- case Status.ACC_Future:\r
- msgId = "SVC1202";\r
- detail[0] = "Accepted for Future, pending Approvals";\r
- response.setStatus(/*httpstatus=*/202);\r
- break;\r
- case ERR_ChoiceNeeded:\r
- msgId = "SVC1300";\r
- detail = result.variables;\r
- response.setStatus(/*httpstatus=*/300);\r
- break;\r
- case ERR_Backend: \r
- msgId = "SVC2500";\r
- detail[0] = GENERAL_SERVICE_ERROR;\r
- response.setStatus(/*httpstatus=*/500);\r
- break;\r
-\r
- default: \r
- msgId = "SVC1500";\r
- detail[0] = GENERAL_SERVICE_ERROR;\r
- response.setStatus(/*httpstatus=*/500);\r
- break;\r
- }\r
-\r
- try {\r
- StringBuilder holder = new StringBuilder();\r
- errDF.newData(trans).load(\r
- service.mapper()\r
- .errorFromMessage(holder,msgId,msg,detail))\r
- .to(response.getOutputStream());\r
- trans.checkpoint(\r
- holder.toString(),\r
-// String.format("ErrResp [" + msgId + "] " + msg,(Object[])detail),\r
- Env.ALWAYS);\r
- } catch (Exception e) {\r
- trans.error().log(e,"unable to send response for",msg);\r
- }\r
- }\r
- \r
- ///////////////////////////\r
- // Namespace\r
- ///////////////////////////\r
- public static final String CREATE_NS = "createNamespace";\r
- public static final String ADD_NS_ADMIN = "addNamespaceAdmin";\r
- public static final String DELETE_NS_ADMIN = "delNamespaceAdmin";\r
- public static final String ADD_NS_RESPONSIBLE = "addNamespaceResponsible";\r
- public static final String DELETE_NS_RESPONSIBLE = "delNamespaceResponsible";\r
- public static final String GET_NS_BY_NAME = "getNamespaceByName";\r
- public static final String GET_NS_BY_ADMIN = "getNamespaceByAdmin";\r
- public static final String GET_NS_BY_RESPONSIBLE = "getNamespaceByResponsible";\r
- public static final String GET_NS_BY_EITHER = "getNamespaceByEither";\r
- public static final String GET_NS_CHILDREN = "getNamespaceChildren";\r
- public static final String UPDATE_NS_DESC = "updateNamespaceDescription";\r
- public static final String DELETE_NS = "deleteNamespace";\r
- \r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#createNS(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)\r
- */\r
- @Override\r
- public Result<Void> requestNS(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp, NsType type) {\r
- TimeTaken tt = trans.start(CREATE_NS, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST request;\r
- try {\r
- Data<REQUEST> rd = nsRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,rd.asString());\r
- }\r
- request = rd.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,CREATE_NS);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
- }\r
- \r
- Result<Void> rp = service.createNS(trans,request,type);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,nsRequestDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,CREATE_NS);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#addAdminToNS(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> addAdminToNS(AuthzTrans trans, HttpServletResponse resp, String ns, String id) {\r
- TimeTaken tt = trans.start(ADD_NS_ADMIN + ' ' + ns + ' ' + id, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<Void> rp = service.addAdminNS(trans,ns,id);\r
- switch(rp.status) {\r
- case OK: \r
- //TODO Perms??\r
- setContentType(resp,nsRequestDF.getOutType());\r
- resp.getOutputStream().println();\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,ADD_NS_ADMIN);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#delAdminFromNS(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> delAdminFromNS(AuthzTrans trans, HttpServletResponse resp, String ns, String id) {\r
- TimeTaken tt = trans.start(DELETE_NS_ADMIN + ' ' + ns + ' ' + id, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<Void> rp = service.delAdminNS(trans, ns, id);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,nsRequestDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_NS_ADMIN);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#addAdminToNS(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> addResponsibilityForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String id) {\r
- TimeTaken tt = trans.start(ADD_NS_RESPONSIBLE + ' ' + ns + ' ' + id, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<Void> rp = service.addResponsibleNS(trans,ns,id);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,nsRequestDF.getOutType());\r
- resp.getOutputStream().println();\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,ADD_NS_RESPONSIBLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#delAdminFromNS(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> delResponsibilityForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String id) {\r
- TimeTaken tt = trans.start(DELETE_NS_RESPONSIBLE + ' ' + ns + ' ' + id, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<Void> rp = service.delResponsibleNS(trans, ns, id);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,nsRequestDF.getOutType());\r
- resp.getOutputStream().println();\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_NS_RESPONSIBLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getNSsByName(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getNSsByName(AuthzTrans trans, HttpServletResponse resp, String ns) {\r
- TimeTaken tt = trans.start(GET_NS_BY_NAME + ' ' + ns, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<NSS> rp = service.getNSbyName(trans, ns);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<NSS> data = nssDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,nssDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_NS_BY_NAME);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
-// TODO: uncomment when on cassandra 2.1.2 for MyNamespace GUI page\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getNSsByAdmin(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getNSsByAdmin(AuthzTrans trans, HttpServletResponse resp, String user, boolean full){\r
- TimeTaken tt = trans.start(GET_NS_BY_ADMIN + ' ' + user, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<NSS> rp = service.getNSbyAdmin(trans, user, full);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<NSS> data = nssDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,nssDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_NS_BY_ADMIN);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
-// TODO: uncomment when on cassandra 2.1.2 for MyNamespace GUI page\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getNSsByResponsible(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getNSsByResponsible(AuthzTrans trans, HttpServletResponse resp, String user, boolean full){\r
- TimeTaken tt = trans.start(GET_NS_BY_RESPONSIBLE + ' ' + user, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<NSS> rp = service.getNSbyResponsible(trans, user, full);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<NSS> data = nssDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
-\r
- setContentType(resp,nssDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_NS_BY_RESPONSIBLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getNSsByResponsible(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getNSsByEither(AuthzTrans trans, HttpServletResponse resp, String user, boolean full){\r
- TimeTaken tt = trans.start(GET_NS_BY_EITHER + ' ' + user, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<NSS> rp = service.getNSbyEither(trans, user, full);\r
- \r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<NSS> data = nssDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
-\r
- setContentType(resp,nssDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_NS_BY_EITHER);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getNSsByResponsible(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getNSsChildren(AuthzTrans trans, HttpServletResponse resp, String parent){\r
- TimeTaken tt = trans.start(GET_NS_CHILDREN + ' ' + parent, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<NSS> rp = service.getNSsChildren(trans, parent);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<NSS> data = nssDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,nssDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_NS_CHILDREN);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> updateNsDescription(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(UPDATE_NS_DESC, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = nsRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,UPDATE_NS_DESC);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
-\r
- }\r
- Result<Void> rp = service.updateNsDescription(trans, rreq);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,nsRequestDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,UPDATE_NS_DESC);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- /*\r
- * (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#requestNS(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)\r
- */\r
- @Override\r
- public Result<Void> deleteNS(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp, String ns) {\r
- TimeTaken tt = trans.start(DELETE_NS + ' ' + ns, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<Void> rp = service.deleteNS(trans,ns);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,nsRequestDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_NS);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- private final static String NS_CREATE_ATTRIB = "nsCreateAttrib";\r
- private final static String NS_UPDATE_ATTRIB = "nsUpdateAttrib";\r
- private final static String READ_NS_BY_ATTRIB = "readNsByAttrib";\r
- private final static String NS_DELETE_ATTRIB = "nsDeleteAttrib";\r
- \r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#createAttribForNS(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> createAttribForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String key, String value) {\r
- TimeTaken tt = trans.start(NS_CREATE_ATTRIB + ' ' + ns + ':'+key+':'+value, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<?> rp = service.createNsAttrib(trans,ns,key,value);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp, keysDF.getOutType());\r
- resp.getOutputStream().println();\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,NS_CREATE_ATTRIB);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#readAttribForNS(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> readNsByAttrib(AuthzTrans trans, HttpServletResponse resp, String key) {\r
- TimeTaken tt = trans.start(READ_NS_BY_ATTRIB + ' ' + key, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<KEYS> rp = service.readNsByAttrib(trans, key);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<KEYS> data = keysDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,keysDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,READ_NS_BY_ATTRIB);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#updAttribForNS(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> updAttribForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String key, String value) {\r
- TimeTaken tt = trans.start(NS_UPDATE_ATTRIB + ' ' + ns + ':'+key+':'+value, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<?> rp = service.updateNsAttrib(trans,ns,key,value);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp, keysDF.getOutType());\r
- resp.getOutputStream().println();\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,NS_UPDATE_ATTRIB);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
-\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#delAttribForNS(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> delAttribForNS(AuthzTrans trans, HttpServletResponse resp, String ns, String key) {\r
- TimeTaken tt = trans.start(NS_DELETE_ATTRIB + ' ' + ns + ':'+key, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<?> rp = service.deleteNsAttrib(trans,ns,key);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp, keysDF.getOutType());\r
- resp.getOutputStream().println();\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,NS_DELETE_ATTRIB);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
-//\r
-// PERMISSION\r
-//\r
- public static final String CREATE_PERMISSION = "createPermission";\r
- public static final String GET_PERMS_BY_TYPE = "getPermsByType";\r
- public static final String GET_PERMS_BY_NAME = "getPermsByName";\r
- public static final String GET_PERMISSIONS_BY_USER = "getPermissionsByUser";\r
- public static final String GET_PERMISSIONS_BY_USER_WITH_QUERY = "getPermissionsByUserWithQuery";\r
- public static final String GET_PERMISSIONS_BY_ROLE = "getPermissionsByRole";\r
- public static final String GET_PERMISSIONS_BY_NS = "getPermissionsByNS";\r
- public static final String UPDATE_PERMISSION = "updatePermission";\r
- public static final String UPDATE_PERM_DESC = "updatePermissionDescription";\r
- public static final String SET_PERMISSION_ROLES_TO = "setPermissionRolesTo";\r
- public static final String DELETE_PERMISSION = "deletePermission";\r
- \r
- /*\r
- * (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#createOrUpdatePerm(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, boolean, java.lang.String, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> createPerm(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start( CREATE_PERMISSION, Env.SUB|Env.ALWAYS); \r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = permRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject(); \r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,CREATE_PERMISSION);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
- }\r
- \r
- Result<Void> rp = service.createPerm(trans,rreq);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,CREATE_PERMISSION);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getChildPerms(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getPermsByType(AuthzTrans trans, HttpServletResponse resp, String perm) {\r
- TimeTaken tt = trans.start(GET_PERMS_BY_TYPE + ' ' + perm, Env.SUB|Env.ALWAYS);\r
- try {\r
- \r
- Result<PERMS> rp = service.getPermsByType(trans, perm);\r
- switch(rp.status) {\r
- case OK:\r
- RosettaData<PERMS> data = permsDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_PERMS_BY_TYPE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> getPermsByName(AuthzTrans trans, HttpServletResponse resp, \r
- String type, String instance, String action) {\r
- \r
- TimeTaken tt = trans.start(GET_PERMS_BY_NAME + ' ' + type\r
- + '|' + instance + '|' + action, Env.SUB|Env.ALWAYS);\r
- try {\r
- \r
- Result<PERMS> rp = service.getPermsByName(trans, type, instance, action);\r
- switch(rp.status) {\r
- case OK:\r
- RosettaData<PERMS> data = permsDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_PERMS_BY_TYPE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getPermissionByUser(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getPermsByUser(AuthzTrans trans, HttpServletResponse resp, String user) {\r
- TimeTaken tt = trans.start(GET_PERMISSIONS_BY_USER + ' ' + user, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<PERMS> rp = service.getPermsByUser(trans, user);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<PERMS> data = permsDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_PERMISSIONS_BY_USER, user);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getPermissionByUser(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getPermsByUserWithAAFQuery(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp, String user) {\r
- TimeTaken tt = trans.start(GET_PERMISSIONS_BY_USER_WITH_QUERY + ' ' + user, Env.SUB|Env.ALWAYS);\r
- try {\r
- PERMS perms;\r
- try {\r
- RosettaData<PERMS> data = permsDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- perms = data.asObject(); \r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,CREATE_PERMISSION);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
- }\r
-\r
- Result<PERMS> rp = service.getPermsByUser(trans, perms, user);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<PERMS> data = permsDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_PERMISSIONS_BY_USER_WITH_QUERY , user);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getPermissionsForRole(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getPermsForRole(AuthzTrans trans, HttpServletResponse resp, String roleName) {\r
- TimeTaken tt = trans.start(GET_PERMISSIONS_BY_ROLE + ' ' + roleName, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<PERMS> rp = service.getPermsByRole(trans, roleName);\r
- switch(rp.status) {\r
- case OK:\r
- RosettaData<PERMS> data = permsDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_PERMISSIONS_BY_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> getPermsByNS(AuthzTrans trans,HttpServletResponse resp,String ns) {\r
- TimeTaken tt = trans.start(GET_PERMISSIONS_BY_NS + ' ' + ns, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<PERMS> rp = service.getPermsByNS(trans, ns);\r
- switch(rp.status) {\r
- case OK:\r
- RosettaData<PERMS> data = permsDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_PERMISSIONS_BY_NS);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /*\r
- * (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#createOrUpdatePerm(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, boolean, java.lang.String, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> renamePerm(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp,\r
- String origType, String origInstance, String origAction) {\r
- String cmdDescription = UPDATE_PERMISSION;\r
- TimeTaken tt = trans.start( cmdDescription + ' ' + origType + ' ' + origInstance + ' ' + origAction, Env.SUB|Env.ALWAYS); \r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = permRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject(); \r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,cmdDescription);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
- }\r
- \r
- Result<Void> rp = service.renamePerm(trans,rreq, origType, origInstance, origAction);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,cmdDescription);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> updatePermDescription(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(UPDATE_PERM_DESC, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = permRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,UPDATE_PERM_DESC);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
-\r
- }\r
- Result<Void> rp = service.updatePermDescription(trans, rreq);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permRequestDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,UPDATE_PERM_DESC);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- \r
- @Override\r
- public Result<Void> resetPermRoles(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(SET_PERMISSION_ROLES_TO, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = rolePermRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN, SET_PERMISSION_ROLES_TO);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
- }\r
- \r
- Result<Void> rp = service.resetPermRoles(trans, rreq);\r
- \r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,SET_PERMISSION_ROLES_TO);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> deletePerm(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(DELETE_PERMISSION, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = permRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,DELETE_PERMISSION);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
-\r
- }\r
-\r
- Result<Void> rp = service.deletePerm(trans,rreq);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_PERMISSION);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> deletePerm(AuthzTrans trans, HttpServletResponse resp, String type, String instance, String action) {\r
- TimeTaken tt = trans.start(DELETE_PERMISSION + type + ' ' + instance + ' ' + action, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<Void> rp = service.deletePerm(trans,type,instance,action);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_PERMISSION);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- public static final String CREATE_ROLE = "createRole";\r
- public static final String GET_ROLES_BY_USER = "getRolesByUser";\r
- public static final String GET_ROLES_BY_NS = "getRolesByNS";\r
- public static final String GET_ROLES_BY_NAME_ONLY = "getRolesByNameOnly";\r
- public static final String GET_ROLES_BY_NAME = "getRolesByName";\r
- public static final String GET_ROLES_BY_PERM = "getRolesByPerm";\r
- public static final String UPDATE_ROLE_DESC = "updateRoleDescription"; \r
- public static final String ADD_PERM_TO_ROLE = "addPermissionToRole";\r
- public static final String DELETE_PERM_FROM_ROLE = "deletePermissionFromRole";\r
- public static final String UPDATE_MGTPERM_ROLE = "updateMgtPermRole";\r
- public static final String DELETE_ROLE = "deleteRole";\r
- public static final String GET_CERT_BY_ID = "getCertByID";\r
-\r
- @Override\r
- public Result<Void> createRole(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(CREATE_ROLE, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = roleRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,CREATE_ROLE);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
-\r
- }\r
- Result<Void> rp = service.createRole(trans, rreq);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,roleRequestDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,CREATE_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getRolesByName(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getRolesByName(AuthzTrans trans, HttpServletResponse resp, String role) {\r
- TimeTaken tt = trans.start(GET_ROLES_BY_NAME + ' ' + role, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<ROLES> rp = service.getRolesByName(trans, role);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<ROLES> data = roleDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,roleDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_ROLES_BY_NAME);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getRolesByUser(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getRolesByUser(AuthzTrans trans,HttpServletResponse resp, String user) {\r
- TimeTaken tt = trans.start(GET_ROLES_BY_USER + ' ' + user, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<ROLES> rp = service.getRolesByUser(trans, user);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<ROLES> data = roleDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,roleDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_ROLES_BY_USER, user);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getRolesByUser(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getRolesByNS(AuthzTrans trans,HttpServletResponse resp, String ns) {\r
- TimeTaken tt = trans.start(GET_ROLES_BY_NS + ' ' + ns, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<ROLES> rp = service.getRolesByNS(trans, ns);\r
- switch(rp.status) {\r
- case OK: \r
- if(!rp.isEmpty()) {\r
- RosettaData<ROLES> data = roleDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- } else {\r
- Question.logEncryptTrace(trans, NO_DATA);\r
- }\r
- setContentType(resp,roleDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_ROLES_BY_NS);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getRolesByNameOnly(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getRolesByNameOnly(AuthzTrans trans,HttpServletResponse resp, String nameOnly) {\r
- TimeTaken tt = trans.start(GET_ROLES_BY_NAME_ONLY + ' ' + nameOnly, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<ROLES> rp = service.getRolesByNameOnly(trans, nameOnly);\r
- switch(rp.status) {\r
- case OK: \r
- if(!rp.isEmpty()) {\r
- RosettaData<ROLES> data = roleDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- } else {\r
- Question.logEncryptTrace(trans, NO_DATA);\r
- }\r
- setContentType(resp,roleDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_ROLES_BY_NAME_ONLY);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getRolesByUser(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getRolesByPerm(AuthzTrans trans,HttpServletResponse resp, String type, String instance, String action) {\r
- TimeTaken tt = trans.start(GET_ROLES_BY_PERM + type +' '+instance+' '+action, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<ROLES> rp = service.getRolesByPerm(trans, type,instance,action);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<ROLES> data = roleDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,roleDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_ROLES_BY_PERM);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /*\r
- * (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#updateDescription(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)\r
- */\r
- @Override\r
- public Result<Void> updateRoleDescription(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(UPDATE_ROLE_DESC, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = roleRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,UPDATE_ROLE_DESC);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
-\r
- }\r
- Result<Void> rp = service.updateRoleDescription(trans, rreq);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,roleRequestDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return rp;\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,UPDATE_ROLE_DESC);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> addPermToRole(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(ADD_PERM_TO_ROLE, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = rolePermRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,ADD_PERM_TO_ROLE);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
-\r
- }\r
- Result<Void> rp = service.addPermToRole(trans, rreq);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- resp.getOutputStream().println();\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,ADD_PERM_TO_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> delPermFromRole(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(DELETE_PERM_FROM_ROLE, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = rolePermRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,DELETE_PERM_FROM_ROLE);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
-\r
- }\r
- Result<Void> rp = service.delPermFromRole(trans, rreq);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- resp.getOutputStream().println();\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_PERM_FROM_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> deleteRole(AuthzTrans trans, HttpServletResponse resp, String role) {\r
- TimeTaken tt = trans.start(DELETE_ROLE + ' ' + role, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<Void> rp = service.deleteRole(trans, role);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> deleteRole(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(DELETE_ROLE, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = roleRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN,CREATE_ROLE);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
- }\r
-\r
- Result<Void> rp = service.deleteRole(trans, rreq);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- public static final String CREATE_CRED = "createUserCred";\r
- private static final String GET_CREDS_BY_NS = "getCredsByNS";\r
- private static final String GET_CREDS_BY_ID = "getCredsByID";\r
- public static final String UPDATE_CRED = "updateUserCred";\r
- public static final String EXTEND_CRED = "extendUserCred";\r
- public static final String DELETE_CRED = "deleteUserCred";\r
- public static final String DOES_CRED_MATCH = "doesCredMatch";\r
- public static final String VALIDATE_BASIC_AUTH = "validateBasicAuth";\r
-\r
-\r
-\r
- @Override\r
- /**\r
- * Create Credential\r
- * \r
- */\r
- public Result<Void> createUserCred(AuthzTrans trans, HttpServletRequest req) {\r
- TimeTaken tt = trans.start(CREATE_CRED, Env.SUB|Env.ALWAYS);\r
- try {\r
- RosettaData<REQUEST> data = credRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- return service.createUserCred(trans, data.asObject());\r
- } catch(APIException e) {\r
- trans.error().log(e,"Bad Input data");\r
- return Result.err(Status.ERR_BadData, e.getLocalizedMessage());\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,CREATE_CRED);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> changeUserCred(AuthzTrans trans, HttpServletRequest req) {\r
- TimeTaken tt = trans.start(UPDATE_CRED, Env.SUB|Env.ALWAYS);\r
- try {\r
- RosettaData<REQUEST> data = credRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- return service.changeUserCred(trans, data.asObject());\r
- } catch(APIException e) {\r
- trans.error().log(e,"Bad Input data");\r
- return Result.err(Status.ERR_BadData, e.getLocalizedMessage());\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,UPDATE_CRED);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#extendUserCred(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, int)\r
- */\r
- @Override\r
- public Result<Void> extendUserCred(AuthzTrans trans, HttpServletRequest req, String days) {\r
- TimeTaken tt = trans.start(EXTEND_CRED, Env.SUB|Env.ALWAYS);\r
- try {\r
- RosettaData<REQUEST> data = credRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- return service.extendUserCred(trans, data.asObject(), days);\r
- } catch(APIException e) {\r
- trans.error().log(e,"Bad Input data");\r
- return Result.err(Status.ERR_BadData, e.getLocalizedMessage());\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,EXTEND_CRED);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> getCredsByNS(AuthzTrans trans, HttpServletResponse resp, String ns) {\r
- TimeTaken tt = trans.start(GET_CREDS_BY_NS + ' ' + ns, Env.SUB|Env.ALWAYS);\r
- \r
- try {\r
- Result<USERS> ru = service.getCredsByNS(trans,ns);\r
- switch(ru.status) {\r
- case OK: \r
- RosettaData<USERS> data = usersDF.newData(trans).load(ru.value);\r
- if(Question.willSpecialLog(trans,trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,usersDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(ru);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_CREDS_BY_NS);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- }\r
- \r
- \r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getCredsByID(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getCredsByID(AuthzTrans trans, HttpServletResponse resp, String id) {\r
- TimeTaken tt = trans.start(GET_CREDS_BY_ID + ' ' + id, Env.SUB|Env.ALWAYS);\r
- \r
- try {\r
- Result<USERS> ru = service.getCredsByID(trans,id);\r
- switch(ru.status) {\r
- case OK: \r
- RosettaData<USERS> data = usersDF.newData(trans).load(ru.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,usersDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(ru);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_CREDS_BY_ID);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- }\r
-\r
- @Override\r
- public Result<Void> deleteUserCred(AuthzTrans trans, HttpServletRequest req) {\r
- TimeTaken tt = trans.start(DELETE_CRED, Env.SUB|Env.ALWAYS);\r
- try {\r
- RosettaData<REQUEST> data = credRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- return service.deleteUserCred(trans, data.asObject());\r
- } catch(APIException e) {\r
- trans.error().log(e,"Bad Input data");\r
- return Result.err(Status.ERR_BadData, e.getLocalizedMessage());\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_CRED);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- } \r
- }\r
- \r
- \r
- @Override\r
- public Result<Date> doesCredentialMatch(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(DOES_CRED_MATCH, Env.SUB|Env.ALWAYS);\r
- try {\r
- RosettaData<REQUEST> data = credRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- return service.doesCredentialMatch(trans, data.asObject());\r
- } catch(APIException e) {\r
- trans.error().log(e,"Bad Input data");\r
- return Result.err(Status.ERR_BadData, e.getLocalizedMessage());\r
- } catch (IOException e) {\r
- trans.error().log(e,IN,DOES_CRED_MATCH);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- } \r
- }\r
-\r
-\r
- @Override\r
- public Result<Void> validBasicAuth(AuthzTrans trans, HttpServletResponse resp, String basicAuth) {\r
- TimeTaken tt = trans.start(VALIDATE_BASIC_AUTH, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<Date> result = service.validateBasicAuth(trans,basicAuth);\r
- switch(result.status){\r
- case OK:\r
- resp.getOutputStream().write(Chrono.utcStamp(result.value).getBytes());\r
- return Result.ok();\r
- }\r
- return Result.err(result);\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,VALIDATE_BASIC_AUTH);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getCertInfoByID(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getCertInfoByID(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp, String id) {\r
- TimeTaken tt = trans.start(GET_CERT_BY_ID, Env.SUB|Env.ALWAYS);\r
- try { \r
- Result<CERTS> rci = service.getCertInfoByID(trans,req,id);\r
- \r
- switch(rci.status) {\r
- case OK: \r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- RosettaData<CERTS> data = certsDF.newData(trans).load(rci.value);\r
- Question.logEncryptTrace(trans,data.asString());\r
- data.to(resp.getOutputStream());\r
- } else {\r
- certsDF.direct(trans, rci.value, resp.getOutputStream());\r
- }\r
- setContentType(resp,certsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rci);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_CERT_BY_ID);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- public static final String CREATE_DELEGATE = "createDelegate";\r
- public static final String UPDATE_DELEGATE = "updateDelegate";\r
- public static final String DELETE_DELEGATE = "deleteDelegate";\r
- public static final String GET_DELEGATE_USER = "getDelegatesByUser";\r
- public static final String GET_DELEGATE_DELG = "getDelegatesByDelegate";\r
- \r
- @Override\r
- public Result<Void> createDelegate(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(CREATE_DELEGATE, Env.SUB|Env.ALWAYS);\r
- try { \r
- Data<REQUEST> data = delgRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- return service.createDelegate(trans, data.asObject());\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,CREATE_DELEGATE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> updateDelegate(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(UPDATE_DELEGATE, Env.SUB|Env.ALWAYS);\r
- try { \r
- Data<REQUEST> data = delgRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- return service.updateDelegate(trans, data.asObject());\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,UPDATE_DELEGATE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> deleteDelegate(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(DELETE_DELEGATE, Env.SUB|Env.ALWAYS);\r
- try {\r
- Data<REQUEST> data = delgRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- return service.deleteDelegate(trans, data.asObject());\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_DELEGATE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> deleteDelegate(AuthzTrans trans, String userName) {\r
- TimeTaken tt = trans.start(DELETE_DELEGATE + ' ' + userName, Env.SUB|Env.ALWAYS);\r
- try {\r
- return service.deleteDelegate(trans, userName);\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_DELEGATE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> getDelegatesByUser(AuthzTrans trans, String user, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(GET_DELEGATE_USER, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<DELGS> rd = service.getDelegatesByUser(trans, user);\r
- \r
- switch(rd.status) {\r
- case OK: \r
- RosettaData<DELGS> data = delgDF.newData(trans).load(rd.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,delgDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rd);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_DELEGATE_USER);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> getDelegatesByDelegate(AuthzTrans trans, String delegate, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(GET_DELEGATE_DELG, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<DELGS> rd = service.getDelegatesByDelegate(trans, delegate);\r
- switch(rd.status) {\r
- case OK: \r
- RosettaData<DELGS> data = delgDF.newData(trans).load(rd.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,delgDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rd);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_DELEGATE_DELG);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- private static final String REQUEST_USER_ROLE = "createUserRole";\r
- private static final String GET_USERROLES = "getUserRoles";\r
- private static final String GET_USERROLES_BY_ROLE = "getUserRolesByRole";\r
- private static final String GET_USERROLES_BY_USER = "getUserRolesByUser";\r
- private static final String SET_ROLES_FOR_USER = "setRolesForUser";\r
- private static final String SET_USERS_FOR_ROLE = "setUsersForRole";\r
- private static final String EXTEND_USER_ROLE = "extendUserRole";\r
- private static final String DELETE_USER_ROLE = "deleteUserRole";\r
- @Override\r
- public Result<Void> requestUserRole(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(REQUEST_USER_ROLE, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST request;\r
- try {\r
- Data<REQUEST> data = userRoleRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- request = data.asObject();\r
- } catch(APIException e) {\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
- }\r
- \r
- Result<Void> rp = service.createUserRole(trans,request);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,REQUEST_USER_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> getUserInRole(AuthzTrans trans, HttpServletResponse resp, String user, String role) {\r
- TimeTaken tt = trans.start(GET_USERROLES + ' ' + user + '|' + role, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<USERS> ru = service.getUserInRole(trans,user,role);\r
- switch(ru.status) {\r
- case OK: \r
- RosettaData<USERS> data = usersDF.newData(trans).load(ru.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,usersDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(ru);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_USERROLES);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
-\r
- }\r
-\r
- @Override\r
- public Result<Void> getUserRolesByUser(AuthzTrans trans, HttpServletResponse resp, String user) {\r
- TimeTaken tt = trans.start(GET_USERROLES_BY_USER + ' ' + user, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<USERROLES> ru = service.getUserRolesByUser(trans,user);\r
- switch(ru.status) {\r
- case OK: \r
- RosettaData<USERROLES> data = userrolesDF.newData(trans).load(ru.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,usersDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(ru);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_USERROLES_BY_USER);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
-\r
- }\r
- \r
- @Override\r
- public Result<Void> getUserRolesByRole(AuthzTrans trans, HttpServletResponse resp, String role) {\r
- TimeTaken tt = trans.start(GET_USERROLES_BY_ROLE + ' ' + role, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<USERROLES> ru = service.getUserRolesByRole(trans,role);\r
- switch(ru.status) {\r
- case OK: \r
- RosettaData<USERROLES> data = userrolesDF.newData(trans).load(ru.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,usersDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(ru);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_USERROLES_BY_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
-\r
- }\r
- \r
-\r
- @Override\r
- public Result<Void> resetUsersForRole(AuthzTrans trans, HttpServletResponse resp, HttpServletRequest req) {\r
- TimeTaken tt = trans.start(SET_USERS_FOR_ROLE, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = userRoleRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN, SET_USERS_FOR_ROLE);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
- }\r
- \r
- Result<Void> rp = service.resetUsersForRole(trans, rreq);\r
- \r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,SET_USERS_FOR_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- }\r
-\r
- @Override\r
- public Result<Void> resetRolesForUser(AuthzTrans trans, HttpServletResponse resp, HttpServletRequest req) {\r
- TimeTaken tt = trans.start(SET_ROLES_FOR_USER, Env.SUB|Env.ALWAYS);\r
- try {\r
- REQUEST rreq;\r
- try {\r
- RosettaData<REQUEST> data = userRoleRequestDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- rreq = data.asObject();\r
- } catch(APIException e) {\r
- trans.error().log("Invalid Input",IN, SET_ROLES_FOR_USER);\r
- return Result.err(Status.ERR_BadData,"Invalid Input");\r
- }\r
- \r
- Result<Void> rp = service.resetRolesForUser(trans, rreq);\r
- \r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,SET_ROLES_FOR_USER);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#extendUserRoleExpiration(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> extendUserRoleExpiration(AuthzTrans trans, HttpServletResponse resp, String user, String role) {\r
- TimeTaken tt = trans.start(EXTEND_USER_ROLE + ' ' + user + ' ' + role, Env.SUB|Env.ALWAYS);\r
- try {\r
- return service.extendUserRole(trans,user,role);\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,EXTEND_USER_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> deleteUserRole(AuthzTrans trans, HttpServletResponse resp, String user, String role) {\r
- TimeTaken tt = trans.start(DELETE_USER_ROLE + ' ' + user + ' ' + role, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<Void> rp = service.deleteUserRole(trans,user,role);\r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,DELETE_USER_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- private static final String UPDATE_APPROVAL = "updateApproval";\r
- private static final String GET_APPROVALS_BY_USER = "getApprovalsByUser.";\r
- private static final String GET_APPROVALS_BY_TICKET = "getApprovalsByTicket.";\r
- private static final String GET_APPROVALS_BY_APPROVER = "getApprovalsByApprover.";\r
- \r
- @Override\r
- public Result<Void> updateApproval(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) {\r
- TimeTaken tt = trans.start(UPDATE_APPROVAL, Env.SUB|Env.ALWAYS);\r
- try {\r
- Data<APPROVALS> data = approvalDF.newData().load(req.getInputStream());\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- Result<Void> rp = service.updateApproval(trans, data.asObject());\r
- \r
- switch(rp.status) {\r
- case OK: \r
- setContentType(resp,approvalDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,UPDATE_APPROVAL);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @Override\r
- public Result<Void> getApprovalsByUser(AuthzTrans trans, HttpServletResponse resp, String user) {\r
- TimeTaken tt = trans.start(GET_APPROVALS_BY_USER + ' ' + user, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<APPROVALS> rp = service.getApprovalsByUser(trans, user);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<APPROVALS> data = approvalDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
- data.to(resp.getOutputStream());\r
- \r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_APPROVALS_BY_USER, user);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> getApprovalsByApprover(AuthzTrans trans, HttpServletResponse resp, String approver) {\r
- TimeTaken tt = trans.start(GET_APPROVALS_BY_APPROVER + ' ' + approver, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<APPROVALS> rp = service.getApprovalsByApprover(trans, approver);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<APPROVALS> data = approvalDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_APPROVALS_BY_APPROVER,approver);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> getApprovalsByTicket(AuthzTrans trans, HttpServletResponse resp, String ticket) {\r
- TimeTaken tt = trans.start(GET_APPROVALS_BY_TICKET, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<APPROVALS> rp = service.getApprovalsByTicket(trans, ticket);\r
- switch(rp.status) {\r
- case OK: \r
- RosettaData<APPROVALS> data = approvalDF.newData(trans).load(rp.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,permsDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rp);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_APPROVALS_BY_TICKET);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
-\r
- \r
- public static final String GET_USERS_PERMISSION = "getUsersByPermission";\r
- public static final String GET_USERS_ROLE = "getUsersByRole";\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getUsersByRole(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getUsersByRole(AuthzTrans trans, HttpServletResponse resp, String role) {\r
- TimeTaken tt = trans.start(GET_USERS_ROLE + ' ' + role, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<USERS> ru = service.getUsersByRole(trans,role);\r
- switch(ru.status) {\r
- case OK: \r
- RosettaData<USERS> data = usersDF.newData(trans).load(ru.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,usersDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(ru);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_USERS_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getUsersByPermission(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String, java.lang.String, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getUsersByPermission(AuthzTrans trans, HttpServletResponse resp, \r
- String type, String instance, String action) {\r
- TimeTaken tt = trans.start(GET_USERS_PERMISSION + ' ' + type + ' ' + instance + ' ' +action, Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<USERS> ru = service.getUsersByPermission(trans,type,instance,action);\r
- switch(ru.status) {\r
- case OK: \r
- RosettaData<USERS> data = usersDF.newData(trans).load(ru.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,usersDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(ru);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_USERS_PERMISSION);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- \r
- public static final String GET_HISTORY_USER = "getHistoryByUser";\r
- public static final String GET_HISTORY_ROLE = "getHistoryByRole";\r
- public static final String GET_HISTORY_PERM = "getHistoryByPerm";\r
- public static final String GET_HISTORY_NS = "getHistoryByNS";\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getHistoryByUser(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)\r
- */\r
- @Override\r
- public Result<Void> getHistoryByUser(AuthzTrans trans, HttpServletResponse resp, String user, int[] yyyymm, final int sort) {\r
- StringBuilder sb = new StringBuilder();\r
- sb.append(GET_HISTORY_USER);\r
- sb.append(' ');\r
- sb.append(user);\r
- sb.append(" for ");\r
- boolean first = true;\r
- for(int i : yyyymm) {\r
- if(first) {\r
- first = false;\r
- } else {\r
- sb.append(',');\r
- }\r
- sb.append(i);\r
- }\r
- TimeTaken tt = trans.start(sb.toString(), Env.SUB|Env.ALWAYS);\r
-\r
- try {\r
- Result<HISTORY> rh = service.getHistoryByUser(trans,user,yyyymm,sort);\r
- switch(rh.status) {\r
- case OK: \r
- RosettaData<HISTORY> data = historyDF.newData(trans).load(rh.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,historyDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rh);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_HISTORY_USER);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getHistoryByRole(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String, int[])\r
- */\r
- @Override\r
- public Result<Void> getHistoryByRole(AuthzTrans trans, HttpServletResponse resp, String role, int[] yyyymm, final int sort) {\r
- StringBuilder sb = new StringBuilder();\r
- sb.append(GET_HISTORY_ROLE);\r
- sb.append(' ');\r
- sb.append(role);\r
- sb.append(" for ");\r
- boolean first = true;\r
- for(int i : yyyymm) {\r
- if(first) {\r
- first = false;\r
- } else {\r
- sb.append(',');\r
- }\r
- sb.append(i);\r
- }\r
- TimeTaken tt = trans.start(sb.toString(), Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<HISTORY> rh = service.getHistoryByRole(trans,role,yyyymm,sort);\r
- switch(rh.status) {\r
- case OK: \r
- RosettaData<HISTORY> data = historyDF.newData(trans).load(rh.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,historyDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rh);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_HISTORY_ROLE);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getHistoryByNS(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String, int[])\r
- */\r
- @Override\r
- public Result<Void> getHistoryByNS(AuthzTrans trans, HttpServletResponse resp, String ns, int[] yyyymm, final int sort) {\r
- StringBuilder sb = new StringBuilder();\r
- sb.append(GET_HISTORY_NS);\r
- sb.append(' ');\r
- sb.append(ns);\r
- sb.append(" for ");\r
- boolean first = true;\r
- for(int i : yyyymm) {\r
- if(first) {\r
- first = false;\r
- } else {\r
- sb.append(',');\r
- }\r
- sb.append(i);\r
- }\r
- TimeTaken tt = trans.start(sb.toString(), Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<HISTORY> rh = service.getHistoryByNS(trans,ns,yyyymm,sort);\r
- switch(rh.status) {\r
- case OK: \r
- RosettaData<HISTORY> data = historyDF.newData(trans).load(rh.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,historyDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rh);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_HISTORY_NS);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getHistoryByPerm(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String, int[])\r
- */\r
- @Override\r
- public Result<Void> getHistoryByPerm(AuthzTrans trans, HttpServletResponse resp, String perm, int[] yyyymm, final int sort) {\r
- StringBuilder sb = new StringBuilder();\r
- sb.append(GET_HISTORY_PERM);\r
- sb.append(' ');\r
- sb.append(perm);\r
- sb.append(" for ");\r
- boolean first = true;\r
- for(int i : yyyymm) {\r
- if(first) {\r
- first = false;\r
- } else {\r
- sb.append(',');\r
- }\r
- sb.append(i);\r
- }\r
- TimeTaken tt = trans.start(sb.toString(), Env.SUB|Env.ALWAYS);\r
- try {\r
- Result<HISTORY> rh = service.getHistoryByPerm(trans,perm,yyyymm,sort);\r
- switch(rh.status) {\r
- case OK: \r
- RosettaData<HISTORY> data = historyDF.newData(trans).load(rh.value);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,historyDF.getOutType());\r
- return Result.ok();\r
- default:\r
- return Result.err(rh);\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,GET_HISTORY_PERM);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- public final static String CACHE_CLEAR = "cacheClear "; \r
-// public final static String CACHE_VALIDATE = "validateCache";\r
- \r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#cacheClear(org.onap.aaf.authz.env.AuthzTrans, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> cacheClear(AuthzTrans trans, String cname) {\r
- TimeTaken tt = trans.start(CACHE_CLEAR + cname, Env.SUB|Env.ALWAYS);\r
- try {\r
- return service.cacheClear(trans,cname);\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,CACHE_CLEAR);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#cacheClear(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, java.lang.Integer)\r
- */\r
- @Override\r
- public Result<Void> cacheClear(AuthzTrans trans, String cname, String segments) {\r
- TimeTaken tt = trans.start(CACHE_CLEAR + cname + ", segments[" + segments + ']', Env.SUB|Env.ALWAYS);\r
- try {\r
- String[] segs = segments.split("\\s*,\\s*");\r
- int isegs[] = new int[segs.length];\r
- for(int i=0;i<segs.length;++i) {\r
- try {\r
- isegs[i] = Integer.parseInt(segs[i]);\r
- } catch(NumberFormatException nfe) {\r
- isegs[i] = -1;\r
- }\r
- }\r
- return service.cacheClear(trans,cname, isegs);\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,CACHE_CLEAR);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#dbReset(org.onap.aaf.authz.env.AuthzTrans)\r
- */\r
- @Override\r
- public void dbReset(AuthzTrans trans) {\r
- service.dbReset(trans);\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getAPI(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse)\r
- */\r
- public final static String API_REPORT = "apiReport";\r
- @Override\r
- public Result<Void> getAPI(AuthzTrans trans, HttpServletResponse resp, RServlet<AuthzTrans> rservlet) {\r
- TimeTaken tt = trans.start(API_REPORT, Env.SUB);\r
- try {\r
- Api api = new Api();\r
- Api.Route ar;\r
- Method[] meths = AuthzCassServiceImpl.class.getDeclaredMethods();\r
- for(RouteReport rr : rservlet.routeReport()) {\r
- api.getRoute().add(ar = new Api.Route());\r
- ar.setMeth(rr.meth.name());\r
- ar.setPath(rr.path);\r
- ar.setDesc(rr.desc);\r
- ar.getContentType().addAll(rr.contextTypes);\r
- for(Method m : meths) {\r
- ApiDoc ad;\r
- if((ad = m.getAnnotation(ApiDoc.class))!=null &&\r
- rr.meth.equals(ad.method()) &&\r
- rr.path.equals(ad.path())) {\r
- for(String param : ad.params()) {\r
- ar.getParam().add(param);\r
- }\r
- for(String text : ad.text()) {\r
- ar.getComments().add(text);\r
- }\r
- ar.setExpected(ad.expectedCode());\r
- for(int ec : ad.errorCodes()) {\r
- ar.getExplicitErr().add(ec);\r
- }\r
- }\r
- }\r
- }\r
- RosettaData<Api> data = apiDF.newData(trans).load(api);\r
- if(Question.willSpecialLog(trans, trans.user())) {\r
- Question.logEncryptTrace(trans,data.asString());\r
- }\r
-\r
- data.to(resp.getOutputStream());\r
- setContentType(resp,apiDF.getOutType());\r
- return Result.ok();\r
-\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,API_REPORT);\r
- return Result.err(e);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
-\r
- public final static String API_EXAMPLE = "apiExample";\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.facade.AuthzFacade#getAPIExample(org.onap.aaf.authz.env.AuthzTrans, javax.servlet.http.HttpServletResponse, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> getAPIExample(AuthzTrans trans, HttpServletResponse resp, String nameOrContentType, boolean optional) {\r
- TimeTaken tt = trans.start(API_EXAMPLE, Env.SUB);\r
- try {\r
- String content =Examples.print(apiDF.getEnv(), nameOrContentType, optional); \r
- resp.getOutputStream().print(content);\r
- setContentType(resp,content.contains("<?xml")?TYPE.XML:TYPE.JSON);\r
- return Result.ok();\r
- } catch (Exception e) {\r
- trans.error().log(e,IN,API_EXAMPLE);\r
- return Result.err(Status.ERR_NotImplemented,e.getMessage());\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.facade;\r
-\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.service.AuthzService;\r
-\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Data;\r
-\r
-import aaf.v2_0.Approvals;\r
-import aaf.v2_0.Certs;\r
-import aaf.v2_0.Delgs;\r
-import aaf.v2_0.Error;\r
-import aaf.v2_0.History;\r
-import aaf.v2_0.Keys;\r
-import aaf.v2_0.Nss;\r
-import aaf.v2_0.Perms;\r
-import aaf.v2_0.Pkey;\r
-import aaf.v2_0.Request;\r
-import aaf.v2_0.Roles;\r
-import aaf.v2_0.UserRoles;\r
-import aaf.v2_0.Users;\r
-\r
-public class AuthzFacade_2_0 extends AuthzFacadeImpl<\r
- Nss,\r
- Perms,\r
- Pkey,\r
- Roles,\r
- Users,\r
- UserRoles,\r
- Delgs,\r
- Certs,\r
- Keys,\r
- Request,\r
- History,\r
- Error,\r
- Approvals>\r
-{\r
- public AuthzFacade_2_0(AuthzEnv env,\r
- AuthzService<Nss, Perms, Pkey, Roles, Users, UserRoles, Delgs, Certs, Keys, Request, History, Error, Approvals> service,\r
- Data.TYPE type) throws APIException {\r
- super(env, service, type);\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service;\r
-\r
-import java.io.IOException;\r
-import java.net.HttpURLConnection;\r
-import java.security.GeneralSecurityException;\r
-import java.util.ArrayList;\r
-import java.util.EnumSet;\r
-import java.util.List;\r
-import java.util.Properties;\r
-\r
-import org.onap.aaf.authz.cadi.DirectAAFLur;\r
-import org.onap.aaf.authz.cadi.DirectAAFUserPass;\r
-import org.onap.aaf.authz.cadi.DirectCertIdentity;\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.env.AuthzTransFilter;\r
-import org.onap.aaf.authz.facade.AuthzFacadeFactory;\r
-import org.onap.aaf.authz.facade.AuthzFacade_2_0;\r
-import org.onap.aaf.authz.org.OrganizationFactory;\r
-import org.onap.aaf.authz.server.AbsServer;\r
-import org.onap.aaf.authz.service.api.API_Api;\r
-import org.onap.aaf.authz.service.api.API_Approval;\r
-import org.onap.aaf.authz.service.api.API_Creds;\r
-import org.onap.aaf.authz.service.api.API_Delegate;\r
-import org.onap.aaf.authz.service.api.API_History;\r
-import org.onap.aaf.authz.service.api.API_Mgmt;\r
-import org.onap.aaf.authz.service.api.API_NS;\r
-import org.onap.aaf.authz.service.api.API_Perms;\r
-import org.onap.aaf.authz.service.api.API_Roles;\r
-import org.onap.aaf.authz.service.api.API_User;\r
-import org.onap.aaf.authz.service.api.API_UserRole;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-import org.onap.aaf.cssa.rserv.HttpMethods;\r
-import org.onap.aaf.dao.CassAccess;\r
-import org.onap.aaf.dao.aaf.cass.CacheInfoDAO;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-\r
-import com.att.aft.dme2.api.DME2Exception;\r
-//import com.att.aft.dme2.api.DME2FilterHolder;\r
-//import com.att.aft.dme2.api.DME2FilterHolder.RequestDispatcherType;\r
-import com.att.aft.dme2.api.DME2Manager;\r
-import com.att.aft.dme2.api.DME2Server;\r
-import com.att.aft.dme2.api.DME2ServerProperties;\r
-import com.att.aft.dme2.api.DME2ServiceHolder;\r
-import com.att.aft.dme2.api.util.DME2FilterHolder;\r
-import com.att.aft.dme2.api.util.DME2FilterHolder.RequestDispatcherType;\r
-import com.att.aft.dme2.api.util.DME2ServletHolder;\r
-import org.onap.aaf.cadi.CadiException;\r
-import org.onap.aaf.cadi.LocatorException;\r
-import org.onap.aaf.cadi.SecuritySetter;\r
-import org.onap.aaf.cadi.aaf.v2_0.AAFTrustChecker;\r
-import org.onap.aaf.cadi.config.Config;\r
-import org.onap.aaf.cadi.config.SecurityInfoC;\r
-import org.onap.aaf.cadi.http.HBasicAuthSS;\r
-import org.onap.aaf.cadi.http.HMangr;\r
-import org.onap.aaf.cadi.http.HX509SS;\r
-import org.onap.aaf.cadi.locator.DME2Locator;\r
-import org.onap.aaf.cadi.taf.basic.BasicHttpTaf;\r
-import org.onap.aaf.inno.env.APIException;\r
-import org.onap.aaf.inno.env.Data;\r
-import org.onap.aaf.inno.env.Env;\r
-import com.datastax.driver.core.Cluster;\r
-\r
-public class AuthAPI extends AbsServer {\r
-\r
- private static final String ORGANIZATION = "Organization.";\r
- private static final String DOMAIN = "openecomp.org";\r
-\r
-// TODO Add Service Metrics\r
-// private Metric serviceMetric;\r
- public final Question question;\r
-// private final SessionFilter sessionFilter;\r
- private AuthzFacade_2_0 facade;\r
- private AuthzFacade_2_0 facade_XML;\r
- private DirectAAFUserPass directAAFUserPass;\r
- \r
- /**\r
- * Construct AuthzAPI with all the Context Supporting Routes that Authz needs\r
- * \r
- * @param env\r
- * @param decryptor \r
- * @throws APIException \r
- */\r
- public AuthAPI(AuthzEnv env) throws Exception {\r
- super(env,"AAF");\r
- \r
- // Set "aaf_url" for peer communication based on Service DME2 URL\r
- env.setProperty(Config.AAF_URL, "https://DME2RESOLVE/"+env.getProperty("DMEServiceName"));\r
- \r
- // Setup Log Names\r
- env.setLog4JNames("log4j.properties","authz","authz|service","audit","init","trace");\r
-\r
- final Cluster cluster = org.onap.aaf.dao.CassAccess.cluster(env,null);\r
-\r
- // jg 4/2015 SessionFilter unneeded... DataStax already deals with Multithreading well\r
- \r
- // Setup Shutdown Hooks for Cluster and Pooled Sessions\r
- Runtime.getRuntime().addShutdownHook(new Thread() {\r
- @Override\r
- public void run() {\r
-// sessionFilter.destroy();\r
- cluster.close();\r
- }\r
- }); \r
- \r
- // Initialize Facade for all uses\r
- AuthzTrans trans = env.newTrans();\r
-\r
- // Initialize Organizations... otherwise, first pass may miss\r
- int org_size = ORGANIZATION.length();\r
- for(String n : env.existingStaticSlotNames()) {\r
- if(n.startsWith(ORGANIZATION)) {\r
- OrganizationFactory.obtain(env, n.substring(org_size));\r
- }\r
- }\r
- \r
- // Need Question for Security purposes (direct User/Authz Query in Filter)\r
- // Start Background Processing\r
- question = new Question(trans, cluster, CassAccess.KEYSPACE, true);\r
- \r
- DirectCertIdentity.set(question.certDAO);\r
- \r
- facade = AuthzFacadeFactory.v2_0(env,trans,Data.TYPE.JSON,question);\r
- facade_XML = AuthzFacadeFactory.v2_0(env,trans,Data.TYPE.XML,question);\r
-\r
- directAAFUserPass = new DirectAAFUserPass(\r
- trans.env(),question,trans.getProperty("Unknown"));\r
-\r
- \r
- // Print results and cleanup\r
- StringBuilder sb = new StringBuilder();\r
- trans.auditTrail(0, sb);\r
- if(sb.length()>0)env.init().log(sb);\r
- trans = null;\r
- sb = null;\r
-\r
- ////////////////////////////////////////////////////////////////////////////\r
- // Time Critical\r
- // These will always be evaluated first\r
- ////////////////////////////////////////////////////////////////////////\r
- API_Creds.timeSensitiveInit(env, this, facade,directAAFUserPass);\r
- API_Perms.timeSensitiveInit(this, facade);\r
- ////////////////////////////////////////////////////////////////////////\r
- // Service APIs\r
- ////////////////////////////////////////////////////////////////////////\r
- API_Creds.init(this, facade);\r
- API_UserRole.init(this, facade);\r
- API_Roles.init(this, facade);\r
- API_Perms.init(this, facade);\r
- API_NS.init(this, facade);\r
- API_User.init(this, facade);\r
- API_Delegate.init(this,facade);\r
- API_Approval.init(this, facade);\r
- API_History.init(this, facade);\r
-\r
- ////////////////////////////////////////////////////////////////////////\r
- // Management APIs\r
- ////////////////////////////////////////////////////////////////////////\r
- // There are several APIs around each concept, and it gets a bit too\r
- // long in this class to create. The initialization of these Management\r
- // APIs have therefore been pushed to StandAlone Classes with static\r
- // init functions\r
- API_Mgmt.init(this, facade);\r
- API_Api.init(this, facade);\r
- \r
- }\r
- \r
- /**\r
- * Setup XML and JSON implementations for each supported Version type\r
- * \r
- * We do this by taking the Code passed in and creating clones of these with the appropriate Facades and properties\r
- * to do Versions and Content switches\r
- * \r
- */\r
- public void route(HttpMethods meth, String path, API api, Code code) throws Exception {\r
- String version = "2.0";\r
- Class<?> respCls = facade.mapper().getClass(api); \r
- if(respCls==null) throw new Exception("Unknown class associated with " + api.getClass().getName() + ' ' + api.name());\r
- String application = applicationJSON(respCls, version);\r
-\r
- route(env,meth,path,code,application,"application/json;version=2.0","*/*");\r
- application = applicationXML(respCls, version);\r
- route(env,meth,path,code.clone(facade_XML,false),application,"text/xml;version=2.0");\r
- }\r
-\r
- /**\r
- * Start up AuthzAPI as DME2 Service\r
- * @param env\r
- * @param props\r
- * @throws Exception \r
- * @throws LocatorException \r
- * @throws CadiException \r
- * @throws NumberFormatException \r
- * @throws IOException \r
- * @throws GeneralSecurityException \r
- * @throws APIException \r
- */\r
- public void startDME2(Properties props) throws Exception {\r
- DME2Manager dme2 = new DME2Manager("AuthzServiceDME2Manager",props);\r
- String s = dme2.getStringProp(Config.AFT_DME2_SSL_INCLUDE_PROTOCOLS,null);\r
- env.init().log("DME2 Service TLS Protocols are set to",(s==null?"DME2 Default":s));\r
- \r
- DME2ServiceHolder svcHolder;\r
- List<DME2ServletHolder> slist = new ArrayList<DME2ServletHolder>();\r
- svcHolder = new DME2ServiceHolder();\r
- String serviceName = env.getProperty("DMEServiceName",null);\r
- if(serviceName!=null) {\r
- svcHolder.setServiceURI(serviceName);\r
- svcHolder.setManager(dme2);\r
- svcHolder.setContext("/");\r
- DME2ServletHolder srvHolder = new DME2ServletHolder(this, new String[]{"/authz","/authn","/mgmt"});\r
- srvHolder.setContextPath("/*");\r
- slist.add(srvHolder);\r
- \r
- EnumSet<RequestDispatcherType> edlist = EnumSet.of(\r
- RequestDispatcherType.REQUEST,\r
- RequestDispatcherType.FORWARD,\r
- RequestDispatcherType.ASYNC\r
- );\r
- \r
- List<DME2FilterHolder> flist = new ArrayList<DME2FilterHolder>();\r
-\r
- // Add DME2 Metrics\r
- // DME2 removed the Metrics Filter in 2.8.8.5\r
- // flist.add(new DME2FilterHolder(new DME2MetricsFilter(serviceName),"/*",edlist));\r
- \r
- // Note: Need CADI to fill out User for AuthTransFilter... so it's first\r
- // Make sure there is no AAF TAF configured for Filters\r
- env.setProperty(Config.AAF_URL,null);\r
-\r
- flist.add(\r
- new DME2FilterHolder(\r
- new AuthzTransFilter(env, null /* no connection to AAF... it is AAF */,\r
- new AAFTrustChecker((Env)env),\r
- new DirectAAFLur(env,question), // Note, this will be assigned by AuthzTransFilter to TrustChecker\r
- new BasicHttpTaf(env, directAAFUserPass,\r
- DOMAIN,Long.parseLong(env.getProperty(Config.AAF_CLEAN_INTERVAL, Config.AAF_CLEAN_INTERVAL_DEF)),\r
- false\r
- ) // Add specialty Direct TAF\r
- ),\r
- "/*", edlist));\r
-\r
- svcHolder.setFilters(flist);\r
- svcHolder.setServletHolders(slist);\r
- \r
- DME2Server dme2svr = dme2.getServer();\r
- \r
- String hostname = env.getProperty("HOSTNAME",null);\r
- if(hostname!=null) {\r
- //dme2svr.setHostname(hostname);\r
- hostname=null;\r
- }\r
- // dme2svr.setGracefulShutdownTimeMs(5000);\r
- \r
- env.init().log("Starting AAF Jetty/DME2 server...");\r
- dme2svr.start();\r
- try {\r
-// if(env.getProperty("NO_REGISTER",null)!=null)\r
- dme2.bindService(svcHolder);\r
- //env.init().log("DME2 is available as HTTPS on port:",dme2svr.getPort());\r
- \r
- // Start CacheInfo Listener\r
- HMangr hman = new HMangr(env, new DME2Locator(env, dme2,"https://DME2RESOLVE/"+serviceName,true /*remove self from cache*/));\r
- SecuritySetter<HttpURLConnection> ss;\r
- \r
-// InetAddress ip = InetAddress.getByName(dme2svr.getHostname());\r
- SecurityInfoC<HttpURLConnection> si = new SecurityInfoC<HttpURLConnection>(env);\r
- String mechID;\r
- if((mechID=env.getProperty(Config.AAF_MECHID))==null) {\r
- String alias = env.getProperty(Config.CADI_ALIAS);\r
- if(alias==null) {\r
- env.init().log(Config.CADI_ALIAS, "is required for AAF Authentication by Certificate. Alternately, set",Config.AAF_MECHID,"and",Config.AAF_MECHPASS);\r
- System.exit(1);\r
- }\r
- ss = new HX509SS(alias,si,true);\r
- env.init().log("X509 Certificate Client configured:", alias);\r
- } else {\r
- String pass = env.getProperty(Config.AAF_MECHPASS);\r
- if(pass==null) {\r
- env.init().log(Config.AAF_MECHPASS, "is required for AAF Authentication by ID/Pass");\r
- System.exit(1);\r
- }\r
- ss = new HBasicAuthSS(mechID,env.decrypt(pass, true),si,true);\r
- env.init().log("BasicAuth (ID/Pass) Client configured.");\r
- }\r
- \r
- //TODO Reenable Cache Update\r
- //CacheInfoDAO.startUpdate(env, hman, ss, dme2svr.getHostname(), dme2svr.getPort());\r
- \r
- while(true) { // Per DME2 Examples...\r
- Thread.sleep(5000);\r
- }\r
- } catch(DME2Exception e) { // Error binding service doesn't seem to stop DME2 or Process\r
- env.init().log(e,"DME2 Initialization Error");\r
- dme2svr.stop();\r
- System.exit(1);\r
- } catch(InterruptedException e) {\r
- env.init().log("AAF Jetty Server interrupted!");\r
- }\r
- } else {\r
- env.init().log("Properties must contain 'DMEServiceName'");\r
- }\r
- }\r
-\r
- public static void main(String[] args) {\r
- setup(AuthAPI.class,"authAPI.props");\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.DELETE;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.POST;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.PUT;\r
-\r
-import java.io.IOException;\r
-import java.util.ArrayList;\r
-import java.util.Collection;\r
-import java.util.Collections;\r
-import java.util.Comparator;\r
-import java.util.Date;\r
-import java.util.GregorianCalendar;\r
-import java.util.HashMap;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Set;\r
-import java.util.TreeMap;\r
-import java.util.UUID;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-\r
-import org.onap.aaf.authz.common.Define;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.org.Executor;\r
-import org.onap.aaf.authz.org.Organization;\r
-import org.onap.aaf.authz.org.Organization.Expiration;\r
-import org.onap.aaf.authz.org.Organization.Identity;\r
-import org.onap.aaf.authz.org.Organization.Policy;\r
-import org.onap.aaf.authz.service.mapper.Mapper;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-import org.onap.aaf.authz.service.validation.Validator;\r
-import org.onap.aaf.cssa.rserv.doc.ApiDoc;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.onap.aaf.dao.aaf.cass.ApprovalDAO;\r
-import org.onap.aaf.dao.aaf.cass.CertDAO;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.DelegateDAO;\r
-import org.onap.aaf.dao.aaf.cass.FutureDAO;\r
-import org.onap.aaf.dao.aaf.cass.HistoryDAO;\r
-import org.onap.aaf.dao.aaf.cass.Namespace;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsSplit;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.NsDAO.Data;\r
-import org.onap.aaf.dao.aaf.hl.CassExecutor;\r
-import org.onap.aaf.dao.aaf.hl.Function;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-import org.onap.aaf.dao.aaf.hl.Question.Access;\r
-\r
-import org.onap.aaf.cadi.principal.BasicPrincipal;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-import org.onap.aaf.inno.env.util.Chrono;\r
-import org.onap.aaf.inno.env.util.Split;\r
-\r
-import aaf.v2_0.CredRequest;\r
-\r
-/**\r
- * AuthzCassServiceImpl implements AuthzCassService for \r
- * \r
- *\r
- * @param <NSS>\r
- * @param <PERMS>\r
- * @param <PERMKEY>\r
- * @param <ROLES>\r
- * @param <USERS>\r
- * @param <DELGS>\r
- * @param <REQUEST>\r
- * @param <HISTORY>\r
- * @param <ERR>\r
- * @param <APPROVALS>\r
- */\r
-public class AuthzCassServiceImpl <NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS>\r
- implements AuthzService <NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> {\r
- \r
- private Mapper <NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> mapper;\r
- @Override\r
- public Mapper <NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> mapper() {return mapper;}\r
- \r
- private static final String ASTERIX = "*";\r
- private static final String CACHE = "cache";\r
-\r
- private final Question ques;\r
- private final Function func;\r
- \r
- public AuthzCassServiceImpl(AuthzTrans trans, Mapper<NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> mapper,Question question) {\r
- this.ques = question;\r
- func = new Function(trans, question);\r
- this.mapper = mapper;\r
- \r
- }\r
-\r
-/***********************************\r
- * NAMESPACE \r
- ***********************************/\r
- /**\r
- * createNS\r
- * @throws DAOException \r
- * @see org.onap.aaf.authz.service.AuthzService#createNS(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, java.lang.String)\r
- */\r
- @ApiDoc( \r
- method = POST, \r
- path = "/authz/ns",\r
- params = {},\r
- expectedCode = 201,\r
- errorCodes = { 403,404,406,409 }, \r
- text = { "Namespace consists of: ",\r
- "<ul><li>name - What you want to call this Namespace</li>",\r
- "<li>responsible(s) - Person(s) who receive Notifications and approves Requests ",\r
- "regarding this Namespace. Companies have Policies as to who may take on ",\r
- "this Responsibility. Separate multiple identities with commas</li>",\r
- "<li>admin(s) - Person(s) who are allowed to make changes on the namespace, ",\r
- "including creating Roles, Permissions and Credentials. Separate multiple ",\r
- "identities with commas</li></ul>",\r
- "Note: Namespaces are dot-delimited (i.e. com.myCompany.myApp) and must be ",\r
- "created with parent credentials (i.e. To create com.myCompany.myApp, you must ",\r
- "be an admin of com.myCompany or com"\r
- }\r
- )\r
- @Override\r
- public Result<Void> createNS(final AuthzTrans trans, REQUEST from, NsType type) {\r
- final Result<Namespace> rnamespace = mapper.ns(trans, from);\r
- final Validator v = new Validator();\r
- if(v.ns(rnamespace).err()) { \r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- final Namespace namespace = rnamespace.value;\r
- final Result<NsDAO.Data> parentNs = ques.deriveNs(trans,namespace.name);\r
- if(parentNs.notOK()) {\r
- return Result.err(parentNs);\r
- }\r
- \r
- if(namespace.name.lastIndexOf('.')<0) { // Root Namespace... Function will check if allowed\r
- return func.createNS(trans, namespace, false);\r
- }\r
- \r
- Result<FutureDAO.Data> fd = mapper.future(trans, NsDAO.TABLE,from,namespace,true, \r
- new Mapper.Memo() {\r
- @Override\r
- public String get() {\r
- return "Create Namespace [" + namespace.name + ']';\r
- }\r
- },\r
- new MayChange() {\r
- private Result<NsDAO.Data> rnd;\r
- @Override\r
- public Result<?> mayChange() {\r
- if(rnd==null) {\r
- rnd = ques.mayUser(trans, trans.user(), parentNs.value,Access.write);\r
- }\r
- return rnd;\r
- }\r
- });\r
- switch(fd.status) {\r
- case OK:\r
- Result<List<Identity>> rfc = func.createFuture(trans, fd.value, namespace.name, trans.user(),parentNs.value, "C");\r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "NS [%s] is saved for future processing",namespace.name);\r
- } else { \r
- return Result.err(rfc);\r
- }\r
- case Status.ACC_Now:\r
- return func.createNS(trans, namespace, false);\r
- default:\r
- return Result.err(fd);\r
- }\r
- }\r
- \r
- @ApiDoc(\r
- method = POST, \r
- path = "/authz/ns/:ns/admin/:id",\r
- params = { "ns|string|true",\r
- "id|string|true" \r
- },\r
- expectedCode = 201,\r
- errorCodes = { 403,404,406,409 }, \r
- text = { "Add an Identity :id to the list of Admins for the Namespace :ns", \r
- "Note: :id must be fully qualified (i.e. ab1234@csp.att.com)" }\r
- )\r
- @Override\r
- public Result<Void> addAdminNS(AuthzTrans trans, String ns, String id) {\r
- return func.addUserRole(trans, id, ns,Question.ADMIN);\r
- }\r
-\r
- @ApiDoc(\r
- method = DELETE, \r
- path = "/authz/ns/:ns/admin/:id",\r
- params = { "ns|string|true",\r
- "id|string|true" \r
- },\r
- expectedCode = 200,\r
- errorCodes = { 403,404 }, \r
- text = { "Remove an Identity :id from the list of Admins for the Namespace :ns",\r
- "Note: :id must be fully qualified (i.e. ab1234@csp.att.com)" }\r
- )\r
- @Override\r
- public Result<Void> delAdminNS(AuthzTrans trans, String ns, String id) {\r
- return func.delAdmin(trans,ns,id);\r
- }\r
-\r
- @ApiDoc(\r
- method = POST, \r
- path = "/authz/ns/:ns/responsible/:id",\r
- params = { "ns|string|true",\r
- "id|string|true" \r
- },\r
- expectedCode = 201,\r
- errorCodes = { 403,404,406,409 }, \r
- text = { "Add an Identity :id to the list of Responsibles for the Namespace :ns",\r
- "Note: :id must be fully qualified (i.e. ab1234@csp.att.com)" }\r
- )\r
- @Override\r
- public Result<Void> addResponsibleNS(AuthzTrans trans, String ns, String id) {\r
- return func.addUserRole(trans,id,ns,Question.OWNER);\r
- }\r
-\r
- @ApiDoc(\r
- method = DELETE, \r
- path = "/authz/ns/:ns/responsible/:id",\r
- params = { "ns|string|true",\r
- "id|string|true" \r
- },\r
- expectedCode = 200,\r
- errorCodes = { 403,404 }, \r
- text = { "Remove an Identity :id to the list of Responsibles for the Namespace :ns",\r
- "Note: :id must be fully qualified (i.e. ab1234@csp.att.com)",\r
- "Note: A namespace must have at least 1 responsible party"\r
- }\r
- )\r
- @Override\r
- public Result<Void> delResponsibleNS(AuthzTrans trans, String ns, String id) {\r
- return func.delOwner(trans,ns,id);\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.AuthzService#applyModel(org.onap.aaf.authz.env.AuthzTrans, java.lang.Object)\r
- */\r
- @ApiDoc(\r
- method = POST, \r
- path = "/authz/ns/:ns/attrib/:key/:value",\r
- params = { "ns|string|true",\r
- "key|string|true",\r
- "value|string|true"},\r
- expectedCode = 201,\r
- errorCodes = { 403,404,406,409 }, \r
- text = { \r
- "Create an attribute in the Namespace",\r
- "You must be given direct permission for key by AAF"\r
- }\r
- )\r
- @Override\r
- public Result<Void> createNsAttrib(AuthzTrans trans, String ns, String key, String value) {\r
- TimeTaken tt = trans.start("Create NsAttrib " + ns + ':' + key + ':' + value, Env.SUB);\r
- try {\r
- // Check inputs\r
- final Validator v = new Validator();\r
- if(v.ns(ns).err() ||\r
- v.key(key).err() ||\r
- v.value(value).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- // Check if exists already\r
- Result<List<Data>> rlnsd = ques.nsDAO.read(trans, ns);\r
- if(rlnsd.notOKorIsEmpty()) {\r
- return Result.err(rlnsd);\r
- }\r
- NsDAO.Data nsd = rlnsd.value.get(0);\r
-\r
- // Check for Existence\r
- if(nsd.attrib.get(key)!=null) {\r
- return Result.err(Status.ERR_ConflictAlreadyExists, "NS Property %s:%s exists", ns, key);\r
- }\r
- \r
- // Check if User may put\r
- if(!ques.isGranted(trans, trans.user(), Define.ROOT_NS, Question.ATTRIB, \r
- ":"+trans.org().getDomain()+".*:"+key, Access.write.name())) {\r
- return Result.err(Status.ERR_Denied, "%s may not create NS Attrib [%s:%s]", trans.user(),ns, key);\r
- }\r
-\r
- // Add Attrib\r
- nsd.attrib.put(key, value);\r
- ques.nsDAO.dao().attribAdd(trans,ns,key,value);\r
- return Result.ok();\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- @ApiDoc(\r
- method = GET, \r
- path = "/authz/ns/attrib/:key",\r
- params = { "key|string|true" },\r
- expectedCode = 200,\r
- errorCodes = { 403,404 }, \r
- text = { \r
- "Read Attributes for Namespace"\r
- }\r
- )\r
- @Override\r
- public Result<KEYS> readNsByAttrib(AuthzTrans trans, String key) {\r
- // Check inputs\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("Key",key).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- // May Read\r
- if(!ques.isGranted(trans, trans.user(), Define.ROOT_NS, Question.ATTRIB, \r
- ":"+trans.org().getDomain()+".*:"+key, Question.READ)) {\r
- return Result.err(Status.ERR_Denied,"%s may not read NS by Attrib '%s'",trans.user(),key);\r
- }\r
-\r
- Result<Set<String>> rsd = ques.nsDAO.dao().readNsByAttrib(trans, key);\r
- if(rsd.notOK()) {\r
- return Result.err(rsd);\r
- }\r
- return mapper().keys(rsd.value);\r
- }\r
-\r
-\r
- @ApiDoc(\r
- method = PUT, \r
- path = "/authz/ns/:ns/attrib/:key/:value",\r
- params = { "ns|string|true",\r
- "key|string|true"},\r
- expectedCode = 200,\r
- errorCodes = { 403,404 }, \r
- text = { \r
- "Update Value on an existing attribute in the Namespace",\r
- "You must be given direct permission for key by AAF"\r
- }\r
- )\r
- @Override\r
- public Result<?> updateNsAttrib(AuthzTrans trans, String ns, String key, String value) {\r
- TimeTaken tt = trans.start("Update NsAttrib " + ns + ':' + key + ':' + value, Env.SUB);\r
- try {\r
- // Check inputs\r
- final Validator v = new Validator();\r
- if(v.ns(ns).err() ||\r
- v.key(key).err() ||\r
- v.value(value).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- // Check if exists already (NS must exist)\r
- Result<List<Data>> rlnsd = ques.nsDAO.read(trans, ns);\r
- if(rlnsd.notOKorIsEmpty()) {\r
- return Result.err(rlnsd);\r
- }\r
- NsDAO.Data nsd = rlnsd.value.get(0);\r
-\r
- // Check for Existence\r
- if(nsd.attrib.get(key)==null) {\r
- return Result.err(Status.ERR_NotFound, "NS Property %s:%s exists", ns, key);\r
- }\r
- \r
- // Check if User may put\r
- if(!ques.isGranted(trans, trans.user(), Define.ROOT_NS, Question.ATTRIB, \r
- ":"+trans.org().getDomain()+".*:"+key, Access.write.name())) {\r
- return Result.err(Status.ERR_Denied, "%s may not create NS Attrib [%s:%s]", trans.user(),ns, key);\r
- }\r
-\r
- // Add Attrib\r
- nsd.attrib.put(key, value);\r
-\r
- return ques.nsDAO.update(trans,nsd);\r
- \r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @ApiDoc(\r
- method = DELETE, \r
- path = "/authz/ns/:ns/attrib/:key",\r
- params = { "ns|string|true",\r
- "key|string|true"},\r
- expectedCode = 200,\r
- errorCodes = { 403,404 }, \r
- text = { \r
- "Delete an attribute in the Namespace",\r
- "You must be given direct permission for key by AAF"\r
- }\r
- )\r
- @Override\r
- public Result<Void> deleteNsAttrib(AuthzTrans trans, String ns, String key) {\r
- TimeTaken tt = trans.start("Delete NsAttrib " + ns + ':' + key, Env.SUB);\r
- try {\r
- // Check inputs\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("NS",ns).err() ||\r
- v.nullOrBlank("Key",key).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- // Check if exists already\r
- Result<List<Data>> rlnsd = ques.nsDAO.read(trans, ns);\r
- if(rlnsd.notOKorIsEmpty()) {\r
- return Result.err(rlnsd);\r
- }\r
- NsDAO.Data nsd = rlnsd.value.get(0);\r
-\r
- // Check for Existence\r
- if(nsd.attrib.get(key)==null) {\r
- return Result.err(Status.ERR_NotFound, "NS Property [%s:%s] does not exist", ns, key);\r
- }\r
- \r
- // Check if User may del\r
- if(!ques.isGranted(trans, trans.user(), Define.ROOT_NS, "attrib", ":com.att.*:"+key, Access.write.name())) {\r
- return Result.err(Status.ERR_Denied, "%s may not delete NS Attrib [%s:%s]", trans.user(),ns, key);\r
- }\r
-\r
- // Add Attrib\r
- nsd.attrib.remove(key);\r
- ques.nsDAO.dao().attribRemove(trans,ns,key);\r
- return Result.ok();\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @ApiDoc(\r
- method = GET, \r
- path = "/authz/nss/:id",\r
- params = { "id|string|true" },\r
- expectedCode = 200,\r
- errorCodes = { 404,406 }, \r
- text = { \r
- "Lists the Admin(s), Responsible Party(s), Role(s), Permission(s)",\r
- "Credential(s) and Expiration of Credential(s) in Namespace :id",\r
- }\r
- )\r
- @Override\r
- public Result<NSS> getNSbyName(AuthzTrans trans, String ns) {\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("NS", ns).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- Result<List<NsDAO.Data>> rlnd = ques.nsDAO.read(trans, ns);\r
- if(rlnd.isOK()) {\r
- if(rlnd.isEmpty()) {\r
- return Result.err(Status.ERR_NotFound, "No data found for %s",ns);\r
- }\r
- Result<NsDAO.Data> rnd = ques.mayUser(trans, trans.user(), rlnd.value.get(0), Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd); \r
- }\r
- \r
- \r
- Namespace namespace = new Namespace(rnd.value);\r
- Result<List<String>> rd = func.getOwners(trans, namespace.name, false);\r
- if(rd.isOK()) {\r
- namespace.owner = rd.value;\r
- }\r
- rd = func.getAdmins(trans, namespace.name, false);\r
- if(rd.isOK()) {\r
- namespace.admin = rd.value;\r
- }\r
- \r
- NSS nss = mapper.newInstance(API.NSS);\r
- return mapper.nss(trans, namespace, nss);\r
- } else {\r
- return Result.err(rlnd);\r
- }\r
- }\r
-\r
- @ApiDoc(\r
- method = GET, \r
- path = "/authz/nss/admin/:id",\r
- params = { "id|string|true" },\r
- expectedCode = 200,\r
- errorCodes = { 403,404 }, \r
- text = { "Lists all Namespaces where Identity :id is an Admin", \r
- "Note: :id must be fully qualified (i.e. ab1234@csp.att.com)" \r
- }\r
- )\r
- @Override\r
- public Result<NSS> getNSbyAdmin(AuthzTrans trans, String user, boolean full) {\r
- final Validator v = new Validator();\r
- if (v.nullOrBlank("User", user).err()) {\r
- return Result.err(Status.ERR_BadData, v.errs());\r
- }\r
- \r
- Result<Collection<Namespace>> rn = loadNamepace(trans, user, ".admin", full);\r
- if(rn.notOK()) {\r
- return Result.err(rn);\r
- }\r
- if (rn.isEmpty()) {\r
- return Result.err(Status.ERR_NotFound, "[%s] is not an admin for any namespaces",user); \r
- }\r
- NSS nss = mapper.newInstance(API.NSS);\r
- // Note: "loadNamespace" already validates view of Namespace\r
- return mapper.nss(trans, rn.value, nss);\r
-\r
- }\r
-\r
- @ApiDoc(\r
- method = GET, \r
- path = "/authz/nss/either/:id",\r
- params = { "id|string|true" },\r
- expectedCode = 200,\r
- errorCodes = { 403,404 }, \r
- text = { "Lists all Namespaces where Identity :id is either an Admin or an Owner", \r
- "Note: :id must be fully qualified (i.e. ab1234@csp.att.com)" \r
- }\r
- )\r
- @Override\r
- public Result<NSS> getNSbyEither(AuthzTrans trans, String user, boolean full) {\r
- final Validator v = new Validator();\r
- if (v.nullOrBlank("User", user).err()) {\r
- return Result.err(Status.ERR_BadData, v.errs());\r
- }\r
- \r
- Result<Collection<Namespace>> rn = loadNamepace(trans, user, null, full);\r
- if(rn.notOK()) {\r
- return Result.err(rn);\r
- }\r
- if (rn.isEmpty()) {\r
- return Result.err(Status.ERR_NotFound, "[%s] is not an admin or owner for any namespaces",user); \r
- }\r
- NSS nss = mapper.newInstance(API.NSS);\r
- // Note: "loadNamespace" already validates view of Namespace\r
- return mapper.nss(trans, rn.value, nss);\r
- }\r
-\r
- private Result<Collection<Namespace>> loadNamepace(AuthzTrans trans, String user, String endsWith, boolean full) {\r
- Result<List<UserRoleDAO.Data>> urd = ques.userRoleDAO.readByUser(trans, user);\r
- if(urd.notOKorIsEmpty()) {\r
- return Result.err(urd);\r
- }\r
- Map<String, Namespace> lm = new HashMap<String,Namespace>();\r
- Map<String, Namespace> other = full || endsWith==null?null:new TreeMap<String,Namespace>();\r
- for(UserRoleDAO.Data urdd : urd.value) {\r
- if(full) {\r
- if(endsWith==null || urdd.role.endsWith(endsWith)) {\r
- RoleDAO.Data rd = RoleDAO.Data.decode(urdd);\r
- Result<NsDAO.Data> nsd = ques.mayUser(trans, user, rd, Access.read);\r
- if(nsd.isOK()) {\r
- Namespace namespace = lm.get(nsd.value.name);\r
- if(namespace==null) {\r
- namespace = new Namespace(nsd.value);\r
- lm.put(namespace.name,namespace);\r
- }\r
- Result<List<String>> rls = func.getAdmins(trans, namespace.name, false);\r
- if(rls.isOK()) {\r
- namespace.admin=rls.value;\r
- }\r
- \r
- rls = func.getOwners(trans, namespace.name, false);\r
- if(rls.isOK()) {\r
- namespace.owner=rls.value;\r
- }\r
- }\r
- }\r
- } else { // Shortened version. Only Namespace Info available from Role.\r
- if(Question.ADMIN.equals(urdd.rname) || Question.OWNER.equals(urdd.rname)) {\r
- RoleDAO.Data rd = RoleDAO.Data.decode(urdd);\r
- Result<NsDAO.Data> nsd = ques.mayUser(trans, user, rd, Access.read);\r
- if(nsd.isOK()) {\r
- Namespace namespace = lm.get(nsd.value.name);\r
- if(namespace==null) {\r
- if(other!=null) {\r
- namespace = other.remove(nsd.value.name);\r
- }\r
- if(namespace==null) {\r
- namespace = new Namespace(nsd.value);\r
- namespace.admin=new ArrayList<String>();\r
- namespace.owner=new ArrayList<String>();\r
- }\r
- if(endsWith==null || urdd.role.endsWith(endsWith)) {\r
- lm.put(namespace.name,namespace);\r
- } else { \r
- other.put(namespace.name,namespace);\r
- }\r
- }\r
- if(Question.OWNER.equals(urdd.rname)) {\r
- namespace.owner.add(urdd.user);\r
- } else {\r
- namespace.admin.add(urdd.user);\r
- }\r
- }\r
- }\r
- }\r
- }\r
- return Result.ok(lm.values());\r
- }\r
-\r
- @ApiDoc(\r
- method = GET, \r
- path = "/authz/nss/responsible/:id",\r
- params = { "id|string|true" },\r
- expectedCode = 200,\r
- errorCodes = { 403,404 }, \r
- text = { "Lists all Namespaces where Identity :id is a Responsible Party", \r
- "Note: :id must be fully qualified (i.e. ab1234@csp.att.com)"\r
- }\r
- )\r
- @Override\r
- public Result<NSS> getNSbyResponsible(AuthzTrans trans, String user, boolean full) {\r
- final Validator v = new Validator();\r
- if (v.nullOrBlank("User", user).err()) {\r
- return Result.err(Status.ERR_BadData, v.errs());\r
- }\r
- Result<Collection<Namespace>> rn = loadNamepace(trans, user, ".owner",full);\r
- if(rn.notOK()) {\r
- return Result.err(rn);\r
- }\r
- if (rn.isEmpty()) {\r
- return Result.err(Status.ERR_NotFound, "[%s] is not an owner for any namespaces",user); \r
- }\r
- NSS nss = mapper.newInstance(API.NSS);\r
- // Note: "loadNamespace" prevalidates\r
- return mapper.nss(trans, rn.value, nss);\r
- }\r
- \r
- // Lists direct child Namespaces of :id.  Caller must have read access on the
- // parent Namespace.  Reads go straight to the DAO (uncached, per ApiDoc).
- @ApiDoc(
- method = GET, 
- path = "/authz/nss/children/:id",
- params = { "id|string|true" },
- expectedCode = 200,
- errorCodes = { 403,404 }, 
- text = { "Lists all Child Namespaces of Namespace :id", 
- "Note: This is not a cached read"
- }
- )
- @Override
- public Result<NSS> getNSsChildren(AuthzTrans trans, String parent) {
- final Validator v = new Validator();
- if(v.nullOrBlank("NS", parent).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- 
- Result<NsDAO.Data> rnd = ques.deriveNs(trans, parent);
- if(rnd.notOK()) {
- return Result.err(rnd);
- }
- // Authorization gate: read access on the parent NS covers its children here.
- rnd = ques.mayUser(trans, trans.user(), rnd.value, Access.read);
- if(rnd.notOK()) {
- return Result.err(rnd); 
- }
-
- Set<Namespace> lm = new HashSet<Namespace>();
- Result<List<NsDAO.Data>> rlnd = ques.nsDAO.dao().getChildren(trans, parent);
- if(rlnd.isOK()) {
- if(rlnd.isEmpty()) {
- return Result.err(Status.ERR_NotFound, "No data found for %s",parent);
- }
- for(NsDAO.Data ndd : rlnd.value) {
- Namespace namespace = new Namespace(ndd);
- // Admin/owner lookups are best-effort: a failed read leaves the list unset
- // rather than failing the whole request.
- Result<List<String>> rls = func.getAdmins(trans, namespace.name, false);
- if(rls.isOK()) {
- namespace.admin=rls.value;
- }
- 
- rls = func.getOwners(trans, namespace.name, false);
- if(rls.isOK()) {
- namespace.owner=rls.value;
- }
-
- lm.add(namespace);
- }
- NSS nss = mapper.newInstance(API.NSS);
- return mapper.nss(trans,lm, nss);
- } else {
- return Result.err(rlnd);
- }
- }
-\r
-\r
- // Replaces the description of an existing Namespace.  Requires write access
- // on the NS; both the NS reference and the new description must be present.
- @ApiDoc(
- method = PUT, 
- path = "/authz/ns",
- params = {},
- expectedCode = 200,
- errorCodes = { 403,404,406 }, 
- text = { "Replace the Current Description of a Namespace with a new one"
- }
- )
- @Override
- public Result<Void> updateNsDescription(AuthzTrans trans, REQUEST from) {
- final Result<Namespace> nsd = mapper.ns(trans, from);
- final Validator v = new Validator();
- if(v.ns(nsd).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- if(v.nullOrBlank("description", nsd.value.description).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
-
- Namespace namespace = nsd.value;
- // Existence check before the permission check, so unknown NS yields 404-style error.
- Result<List<NsDAO.Data>> rlnd = ques.nsDAO.read(trans, namespace.name);
- 
- if(rlnd.notOKorIsEmpty()) {
- return Result.err(Status.ERR_NotFound, "Namespace [%s] does not exist",namespace.name);
- }
- 
- if (ques.mayUser(trans, trans.user(), rlnd.value.get(0), Access.write).notOK()) {
- return Result.err(Status.ERR_Denied, "You do not have approval to change %s",namespace.name);
- }
-
- // "addDescription" overwrites the stored description despite the name — TODO confirm in NsDAO.
- Result<Void> rdr = ques.nsDAO.dao().addDescription(trans, namespace.name, namespace.description);
- if(rdr.isOK()) {
- return Result.ok();
- } else {
- return Result.err(rdr);
- }
- }
- \r
- /**\r
- * deleteNS\r
- * @throws DAOException \r
- * @see org.onap.aaf.authz.service.AuthzService#deleteNS(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, java.lang.String)\r
- */\r
- @ApiDoc(\r
- method = DELETE, \r
- path = "/authz/ns/:ns",\r
- params = { "ns|string|true" },\r
- expectedCode = 200,\r
- errorCodes = { 403,404,424 }, \r
- text = { "Delete the Namespace :ns. Namespaces cannot normally be deleted when there ",\r
- "are still credentials associated with them, but they can be deleted by setting ",\r
- "the \"force\" property. To do this: Add 'force=true' as a query parameter",\r
- "<p>WARNING: Using force will delete all credentials attached to this namespace. Use with care.</p>"\r
- + "if the \"force\" property is set to 'force=move', then Permissions and Roles are not deleted,"\r
- + "but are retained, and assigned to the Parent Namespace. 'force=move' is not permitted "\r
- + "at or below Application Scope"\r
- }\r
- )\r
- @Override\r
- public Result<Void> deleteNS(AuthzTrans trans, String ns) {\r
- return func.deleteNS(trans, ns);\r
- }\r
-\r
-\r
-/***********************************\r
- * PERM \r
- ***********************************/\r
-\r
- /*\r
- * (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.AuthzService#createOrUpdatePerm(org.onap.aaf.authz.env.AuthzTrans, java.lang.Object, boolean, java.lang.String, java.lang.String, java.lang.String, java.util.List, java.util.List)\r
- */\r
- // Creates a Permission (type|instance|action).  mapper.future decides whether
- // the change may happen now (ACC_Now) or must be queued for approval (OK →
- // Future row created); MayChange lazily checks write access on the perm's NS.
- @ApiDoc( 
- method = POST, 
- path = "/authz/perm",
- params = {},
- expectedCode = 201,
- errorCodes = {403,404,406,409}, 
- text = { "Permission consists of:",
- "<ul><li>type - a Namespace qualified identifier specifying what kind of resource "
- + "is being protected</li>",
- "<li>instance - a key, possibly multi-dimensional, that identifies a specific "
- + " instance of the type</li>",
- "<li>action - what kind of action is allowed</li></ul>",
- "Note: instance and action can be an *"
- }
- )
- @Override
- public Result<Void> createPerm(final AuthzTrans trans,REQUEST rreq) { 
- final Result<PermDAO.Data> newPd = mapper.perm(trans, rreq);
- final Validator v = new Validator(trans);
- if(v.perm(newPd).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- 
- Result<FutureDAO.Data> fd = mapper.future(trans, PermDAO.TABLE, rreq, newPd.value,false,
- new Mapper.Memo() {
- @Override
- public String get() {
- return "Create Permission [" + 
- newPd.value.fullType() + '|' + 
- newPd.value.instance + '|' + 
- newPd.value.action + ']';
- }
- },
- new MayChange() {
- // Cached so repeated mayChange() calls hit the Question only once.
- private Result<NsDAO.Data> nsd;
- @Override
- public Result<?> mayChange() {
- if(nsd==null) {
- nsd = ques.mayUser(trans, trans.user(), newPd.value, Access.write);
- }
- return nsd;
- }
- });
- // NOTE(review): this NS read runs even when fd.status falls to the default
- // (error) branch, where nsr is unused — harmless but avoidable work.
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, newPd.value.ns);
- if(nsr.notOKorIsEmpty()) {
- return Result.err(nsr);
- }
- switch(fd.status) {
- case OK:
- // Approval required: record a Future ("C" = create) and report ACC_Future.
- Result<List<Identity>> rfc = func.createFuture(trans,fd.value, 
- newPd.value.fullType() + '|' + newPd.value.instance + '|' + newPd.value.action,
- trans.user(),
- nsr.value.get(0),
- "C");
- if(rfc.isOK()) {
- return Result.err(Status.ACC_Future, "Perm [%s.%s|%s|%s] is saved for future processing",
- newPd.value.ns,
- newPd.value.type,
- newPd.value.instance,
- newPd.value.action);
- } else {
- return Result.err(rfc);
- }
- case Status.ACC_Now:
- return func.createPerm(trans, newPd.value, true);
- default:
- return Result.err(fd);
- } 
- }
-\r
- // Lists Permissions whose type matches :type.  No explicit mayUser check
- // here: per the inline note, the mapper filters out what the caller may not
- // view when "filter" (last arg) is true.
- @ApiDoc( 
- method = GET, 
- path = "/authz/perms/:type",
- params = {"type|string|true"},
- expectedCode = 200,
- errorCodes = { 404,406 }, 
- text = { "List All Permissions that match the :type element of the key" }
- )
- @Override
- public Result<PERMS> getPermsByType(AuthzTrans trans, final String permType) {
- final Validator v = new Validator();
- if(v.nullOrBlank("PermType", permType).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
-
- Result<List<PermDAO.Data>> rlpd = ques.getPermsByType(trans, permType);
- if(rlpd.notOK()) {
- return Result.err(rlpd);
- }
-
-// We don't have instance & action for mayUserView... do we want to loop through all returned here as well as in mapper?
-// Result<NsDAO.Data> r;
-// if((r = ques.mayUserViewPerm(trans, trans.user(), permType)).notOK())return Result.err(r);
- 
- PERMS perms = mapper.newInstance(API.PERMS);
- if(!rlpd.isEmpty()) {
- // Note: Mapper will restrict what can be viewed
- return mapper.perms(trans, rlpd.value, perms, true);
- }
- // Empty result is OK (not 404) for this endpoint.
- return Result.ok(perms);
- }
- \r
- // Lists Permissions matching the full key :type/:instance/:action.
- // As with getPermsByType, view restriction is delegated to the mapper.
- @ApiDoc( 
- method = GET, 
- path = "/authz/perms/:type/:instance/:action",
- params = {"type|string|true",
- "instance|string|true",
- "action|string|true"},
- expectedCode = 200,
- errorCodes = { 404,406 }, 
- text = { "List Permissions that match key; :type, :instance and :action" }
- )
- @Override
- public Result<PERMS> getPermsByName(AuthzTrans trans, String type, String instance, String action) {
- final Validator v = new Validator();
- if(v.nullOrBlank("PermType", type).err()
- || v.nullOrBlank("PermInstance", instance).err()
- || v.nullOrBlank("PermAction", action).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- 
- Result<List<PermDAO.Data>> rlpd = ques.getPermsByName(trans, type, instance, action);
- if(rlpd.notOK()) {
- return Result.err(rlpd);
- }
-
- PERMS perms = mapper.newInstance(API.PERMS);
- if(!rlpd.isEmpty()) {
- // Note: Mapper will restrict what can be viewed
- return mapper.perms(trans, rlpd.value, perms, true);
- }
- return Result.ok(perms);
- }
-\r
- // Lists Permissions held by :user.  If the caller is querying their own
- // identity, mapper filtering is disabled (last arg false) — you may always
- // see your own permissions.
- @ApiDoc( 
- method = GET, 
- path = "/authz/perms/user/:user",
- params = {"user|string|true"},
- expectedCode = 200,
- errorCodes = { 404,406 }, 
- text = { "List All Permissions that match user :user",
- "<p>'user' must be expressed as full identity (ex: id@full.domain.com)</p>"}
- )
- @Override
- public Result<PERMS> getPermsByUser(AuthzTrans trans, String user) {
- final Validator v = new Validator();
- if(v.nullOrBlank("User", user).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
-
- // forceRequested() presumably bypasses the cache — TODO confirm in Question.
- Result<List<PermDAO.Data>> rlpd = ques.getPermsByUser(trans, user, trans.forceRequested());
- if(rlpd.notOK()) {
- return Result.err(rlpd);
- }
- 
- PERMS perms = mapper.newInstance(API.PERMS);
- 
- if(rlpd.isEmpty()) {
- return Result.ok(perms);
- }
- // Note: Mapper will restrict what can be viewed
- // if user is the same as that which is looked up, no filtering is required
- return mapper.perms(trans, rlpd.value, 
- perms, 
- !user.equals(trans.user()));
- }
- \r
- // POST variant of getPermsByUser: in addition to the user's own permissions,
- // the caller submits candidate "access" permissions; each one that both the
- // caller may read AND the target user holds (per mayUser) is appended to the
- // result list.  Instance format: :<ns|role|perm>[:key...] (see ApiDoc text).
- @ApiDoc( 
- method = POST, 
- path = "/authz/perms/user/:user",
- params = {"user|string|true"},
- expectedCode = 200,
- errorCodes = { 404,406 }, 
- text = { "List All Permissions that match user :user",
- "<p>'user' must be expressed as full identity (ex: id@full.domain.com)</p>",
- "",
- "Present Queries as one or more Permissions (see ContentType Links below for format).",
- "",
- "If the Caller is Granted this specific Permission, and the Permission is valid",
- " for the User, it will be included in response Permissions, along with",
- " all the normal permissions on the 'GET' version of this call. If it is not",
- " valid, or Caller does not have permission to see, it will be removed from the list",
- "",
- " *Note: This design allows you to make one call for all expected permissions",
- " The permission to be included MUST be:",
- " <user namespace>.access|:<ns|role|perm>[:key]|<create|read|write>",
- " examples:",
- " com.att.myns.access|:ns|write",
- " com.att.myns.access|:role:myrole|create",
- " com.att.myns.access|:perm:mytype:myinstance:myaction|read",
- ""
- }
- )
- @Override
- public Result<PERMS> getPermsByUser(AuthzTrans trans, PERMS _perms, String user) {
- PERMS perms = _perms;
- final Validator v = new Validator();
- if(v.nullOrBlank("User", user).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- 
- //////////////
- Result<List<PermDAO.Data>> rlpd = ques.getPermsByUser(trans, user,trans.forceRequested());
- if(rlpd.notOK()) {
- return Result.err(rlpd);
- }
- 
- /*//TODO 
- 1) See if allowed to query
- 2) See if User is allowed
- */
- // Parse the submitted candidate permissions from the request body.
- Result<List<PermDAO.Data>> in = mapper.perms(trans, perms);
- if(in.isOKhasData()) {
- // "out" aliases rlpd.value: approved candidates are appended directly
- // to the list that is mapped into the response below.
- List<PermDAO.Data> out = rlpd.value;
- boolean ok;
- for(PermDAO.Data pdd : in.value) {
- ok = false;
- // Only ".access" perms participate in this query protocol.
- if("access".equals(pdd.type)) {
- Access access = Access.valueOf(pdd.action);
- String[] mdkey = Split.splitTrim(':',pdd.instance);
- if(mdkey.length>1) {
- // mdkey[0] is empty (instance begins with ':'); mdkey[1] is the target kind.
- String type = mdkey[1];
- if("role".equals(type)) {
- if(mdkey.length>2) {
- RoleDAO.Data rdd = new RoleDAO.Data();
- rdd.ns=pdd.ns;
- rdd.name=mdkey[2];
- // Caller must be able to read the role AND the target user must hold the access.
- ok = ques.mayUser(trans, trans.user(), rdd, Access.read).isOK() && ques.mayUser(trans, user, rdd , access).isOK();
- }
- } else if("perm".equals(type)) {
- if(mdkey.length>4) { // also need instance/action
- PermDAO.Data p = new PermDAO.Data();
- p.ns=pdd.ns;
- p.type=mdkey[2];
- p.instance=mdkey[3];
- p.action=mdkey[4];
- ok = ques.mayUser(trans, trans.user(), p, Access.read).isOK() && ques.mayUser(trans, user, p , access).isOK();
- }
- } else if("ns".equals(type)) {
- NsDAO.Data ndd = new NsDAO.Data();
- ndd.name=pdd.ns;
- ok = ques.mayUser(trans, trans.user(), ndd, Access.read).isOK() && ques.mayUser(trans, user, ndd , access).isOK();
- }
- }
- }
- if(ok) {
- out.add(pdd);
- }
- }
- } 
- 
- // Fresh response object; the incoming PERMS was only used as query input.
- perms = mapper.newInstance(API.PERMS);
- if(rlpd.isEmpty()) {
- return Result.ok(perms);
- }
- // Note: Mapper will restrict what can be viewed
- // if user is the same as that which is looked up, no filtering is required
- return mapper.perms(trans, rlpd.value, 
- perms, 
- !user.equals(trans.user()));
- }
- \r
- // Lists Permissions granted to :role.  Unlike the type/name variants, this
- // one does an explicit read-access check on the role before querying.
- @ApiDoc( 
- method = GET, 
- path = "/authz/perms/role/:role",
- params = {"role|string|true"},
- expectedCode = 200,
- errorCodes = { 404,406 }, 
- text = { "List All Permissions that are granted to :role" }
- )
- @Override
- public Result<PERMS> getPermsByRole(AuthzTrans trans,String role) {
- final Validator v = new Validator();
- if(v.nullOrBlank("Role", role).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
-
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, ques,role);
- if(rrdd.notOK()) {
- return Result.err(rrdd);
- }
-
- Result<NsDAO.Data> r = ques.mayUser(trans, trans.user(), rrdd.value, Access.read);
- if(r.notOK()) {
- return Result.err(r);
- }
-
- PERMS perms = mapper.newInstance(API.PERMS);
-
- Result<List<PermDAO.Data>> rlpd = ques.getPermsByRole(trans, role, trans.forceRequested());
- if(rlpd.isOKhasData()) {
- // Note: Mapper will restrict what can be viewed
- return mapper.perms(trans, rlpd.value, perms, true);
- }
- // NOTE(review): DAO errors are swallowed here — empty PERMS is returned
- // both for "no data" and for a failed read.  Verify this is intentional.
- return Result.ok(perms);
- }
-\r
- // Lists all Permissions defined in Namespace :ns.  Requires read access on
- // the NS itself; per-perm filtering is then delegated to the mapper.
- @ApiDoc( 
- method = GET, 
- path = "/authz/perms/ns/:ns",
- params = {"ns|string|true"},
- expectedCode = 200,
- errorCodes = { 404,406 }, 
- text = { "List All Permissions that are in Namespace :ns" }
- )
- @Override
- public Result<PERMS> getPermsByNS(AuthzTrans trans,String ns) {
- final Validator v = new Validator();
- if(v.nullOrBlank("NS", ns).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
-
- Result<NsDAO.Data> rnd = ques.deriveNs(trans, ns);
- if(rnd.notOK()) {
- return Result.err(rnd);
- }
-
- rnd = ques.mayUser(trans, trans.user(), rnd.value, Access.read);
- if(rnd.notOK()) {
- return Result.err(rnd); 
- }
- 
- Result<List<PermDAO.Data>> rlpd = ques.permDAO.readNS(trans, ns);
- if(rlpd.notOK()) {
- return Result.err(rlpd);
- }
-
- PERMS perms = mapper.newInstance(API.PERMS);
- if(!rlpd.isEmpty()) {
- // Note: Mapper will restrict what can be viewed
- return mapper.perms(trans, rlpd.value,perms, true);
- }
- return Result.ok(perms);
- }
- \r
- @ApiDoc( \r
- method = PUT, \r
- path = "/authz/perm/:type/:instance/:action",\r
- params = {"type|string|true",\r
- "instance|string|true",\r
- "action|string|true"},\r
- expectedCode = 200,\r
- errorCodes = { 404,406, 409 }, \r
- text = { "Rename the Permission referenced by :type :instance :action, and "\r
- + "rename (copy/delete) to the Permission described in PermRequest" }\r
- )\r
- @Override\r
- public Result<Void> renamePerm(final AuthzTrans trans,REQUEST rreq, String origType, String origInstance, String origAction) {\r
- final Result<PermDAO.Data> newPd = mapper.perm(trans, rreq);\r
- final Validator v = new Validator(trans);\r
- if(v.perm(newPd).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- if (ques.mayUser(trans, trans.user(), newPd.value,Access.write).notOK()) {\r
- return Result.err(Status.ERR_Denied, "You do not have approval to change Permission [%s.%s|%s|%s]",\r
- newPd.value.ns,newPd.value.type,newPd.value.instance,newPd.value.action);\r
- }\r
- \r
- Result<NsSplit> nss = ques.deriveNsSplit(trans, origType);\r
- Result<List<PermDAO.Data>> origRlpd = ques.permDAO.read(trans, nss.value.ns, nss.value.name, origInstance, origAction); \r
- \r
- if(origRlpd.notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_PermissionNotFound, \r
- "Permission [%s|%s|%s] does not exist",\r
- origType,origInstance,origAction);\r
- }\r
- \r
- PermDAO.Data origPd = origRlpd.value.get(0);\r
-\r
- if (!origPd.ns.equals(newPd.value.ns)) {\r
- return Result.err(Status.ERR_Denied, "Cannot change namespace with rename command. " +\r
- "<new type> must start with [" + origPd.ns + "]");\r
- }\r
- \r
- if ( origPd.type.equals(newPd.value.type) && \r
- origPd.action.equals(newPd.value.action) && \r
- origPd.instance.equals(newPd.value.instance) ) {\r
- return Result.err(Status.ERR_ConflictAlreadyExists, "New Permission must be different than original permission");\r
- }\r
- \r
- Set<String> origRoles = origPd.roles(false);\r
- if (!origRoles.isEmpty()) {\r
- Set<String> roles = newPd.value.roles(true);\r
- for (String role : origPd.roles) {\r
- roles.add(role); \r
- }\r
- } \r
- \r
- newPd.value.description = origPd.description;\r
- \r
- Result<Void> rv = null;\r
- \r
- rv = func.createPerm(trans, newPd.value, false);\r
- if (rv.isOK()) {\r
- rv = func.deletePerm(trans, origPd, true, false);\r
- }\r
- return rv;\r
- }\r
- \r
- // Replaces the description of an existing Permission.  Mirrors
- // updateNsDescription: existence check, then write-access check, then DAO update.
- @ApiDoc( 
- method = PUT, 
- path = "/authz/perm",
- params = {},
- expectedCode = 200,
- errorCodes = { 404,406 }, 
- text = { "Add Description Data to Perm" }
- )
- @Override
- public Result<Void> updatePermDescription(AuthzTrans trans, REQUEST from) {
- final Result<PermDAO.Data> pd = mapper.perm(trans, from);
- final Validator v = new Validator(trans);
- if(v.perm(pd).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- if(v.nullOrBlank("description", pd.value.description).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- final PermDAO.Data perm = pd.value;
- if(ques.permDAO.read(trans, perm.ns, perm.type, perm.instance,perm.action).notOKorIsEmpty()) {
- return Result.err(Status.ERR_NotFound, "Permission [%s.%s|%s|%s] does not exist",
- perm.ns,perm.type,perm.instance,perm.action);
- }
-
- if (ques.mayUser(trans, trans.user(), perm, Access.write).notOK()) {
- return Result.err(Status.ERR_Denied, "You do not have approval to change Permission [%s.%s|%s|%s]",
- perm.ns,perm.type,perm.instance,perm.action);
- }
-
- // NOTE(review): nsr is only used as an existence guard on the NS; its value
- // is discarded.
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, pd.value.ns);
- if(nsr.notOKorIsEmpty()) {
- return Result.err(nsr);
- }
-
- Result<Void> rdr = ques.permDAO.addDescription(trans, perm.ns, perm.type, perm.instance,
- perm.action, perm.description);
- if(rdr.isOK()) {
- return Result.ok();
- } else {
- return Result.err(rdr);
- }
-
- }
- \r
- // Reconciles a Permission's granted roles to exactly the set supplied in the
- // request: roles in the request but not currently granted are added (perm
- // granted to role), and roles currently granted but absent from the request
- // are removed.  Already-exists / not-found statuses during add/remove are
- // tolerated so the operation is effectively idempotent.
- @ApiDoc(
- method = PUT,
- path = "/authz/role/perm",
- params = {},
- expectedCode = 201,
- errorCodes = {403,404,406,409},
- text = { "Set a permission's roles to roles given" }
- )
-
- @Override
- public Result<Void> resetPermRoles(final AuthzTrans trans, REQUEST rreq) {
- final Result<PermDAO.Data> updt = mapper.permFromRPRequest(trans, rreq);
- if(updt.notOKorIsEmpty()) {
- return Result.err(updt);
- }
-
- final Validator v = new Validator(trans);
- if(v.perm(updt).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
-
- Result<NsDAO.Data> nsd = ques.mayUser(trans, trans.user(), updt.value, Access.write);
- if (nsd.notOK()) {
- return Result.err(nsd);
- }
-
- // Read full set to get CURRENT values
- Result<List<PermDAO.Data>> rcurr = ques.permDAO.read(trans, 
- updt.value.ns, 
- updt.value.type, 
- updt.value.instance, 
- updt.value.action);
- 
- if(rcurr.notOKorIsEmpty()) {
- return Result.err(Status.ERR_PermissionNotFound, 
- "Permission [%s.%s|%s|%s] does not exist",
- updt.value.ns,updt.value.type,updt.value.instance,updt.value.action);
- }
- 
- // Create a set of Update Roles, which are in Internal Format
- // ("ns|name").  Unresolvable role names are logged and skipped rather
- // than failing the whole reset.
- Set<String> updtRoles = new HashSet<String>();
- Result<NsSplit> nss;
- for(String role : updt.value.roles(false)) {
- nss = ques.deriveNsSplit(trans, role);
- if(nss.isOK()) {
- updtRoles.add(nss.value.ns + '|' + nss.value.name);
- } else {
- trans.error().log(nss.errorString());
- }
- }
-
- Result<Void> rv = null;
- 
- for(PermDAO.Data curr : rcurr.value) {
- Set<String> currRoles = curr.roles(false);
- // must add roles to this perm, and add this perm to each role 
- // in the update, but not in the current 
- for (String role : updtRoles) {
- if (!currRoles.contains(role)) {
- Result<RoleDAO.Data> key = RoleDAO.Data.decode(trans, ques, role);
- if(key.isOKhasData()) {
- Result<List<RoleDAO.Data>> rrd = ques.roleDAO.read(trans, key.value);
- if(rrd.isOKhasData()) {
- for(RoleDAO.Data r : rrd.value) {
- rv = func.addPermToRole(trans, r, curr, false);
- // Tolerate "already exists" so a partial earlier run can be retried.
- if (rv.notOK() && rv.status!=Result.ERR_ConflictAlreadyExists) {
- return Result.err(rv);
- }
- }
- } else {
- return Result.err(rrd);
- }
- }
- }
- }
- // similarly, must delete roles from this perm, and delete this perm from each role
- // in the update, but not in the current
- for (String role : currRoles) {
- if (!updtRoles.contains(role)) {
- Result<RoleDAO.Data> key = RoleDAO.Data.decode(trans, ques, role);
- if(key.isOKhasData()) {
- Result<List<RoleDAO.Data>> rdd = ques.roleDAO.read(trans, key.value);
- if(rdd.isOKhasData()) {
- for(RoleDAO.Data r : rdd.value) {
- rv = func.delPermFromRole(trans, r, curr, true);
- // Tolerate "not found" for the symmetric retry case.
- if (rv.notOK() && rv.status!=Status.ERR_PermissionNotFound) {
- return Result.err(rv);
- }
- }
- }
- }
- }
- } 
- } 
- // rv==null means no changes were needed; report success either way.
- return rv==null?Result.ok():rv; 
- }
- \r
- // Deletes the Permission identified in the request body.  Like createPerm,
- // mapper.future routes the change either to immediate execution (ACC_Now) or
- // to the approval queue ("D" = delete Future record).
- @ApiDoc( 
- method = DELETE,
- path = "/authz/perm",
- params = {},
- expectedCode = 200,
- errorCodes = { 404,406 }, 
- text = { "Delete the Permission referenced by PermKey.",
- "You cannot normally delete a permission which is still granted to roles,",
- "however the \"force\" property allows you to do just that. To do this: Add",
- "'force=true' as a query parameter.",
- "<p>WARNING: Using force will ungrant this permission from all roles. Use with care.</p>" }
- )
- @Override
- public Result<Void> deletePerm(final AuthzTrans trans, REQUEST from) {
- Result<PermDAO.Data> pd = mapper.perm(trans, from);
- if(pd.notOK()) {
- return Result.err(pd);
- }
- final Validator v = new Validator(trans);
- if(v.nullOrBlank(pd.value).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- final PermDAO.Data perm = pd.value;
- if (ques.permDAO.read(trans, perm).notOKorIsEmpty()) {
- return Result.err(Status.ERR_PermissionNotFound, "Permission [%s.%s|%s|%s] does not exist",
- perm.ns,perm.type,perm.instance,perm.action );
- }
-
- Result<FutureDAO.Data> fd = mapper.future(trans,PermDAO.TABLE,from,perm,false,
- new Mapper.Memo() {
- @Override
- public String get() {
- return "Delete Permission [" + perm.fullPerm() + ']';
- }
- },
- new MayChange() {
- // Cached so repeated mayChange() calls hit the Question only once.
- private Result<NsDAO.Data> nsd;
- @Override
- public Result<?> mayChange() {
- if(nsd==null) {
- nsd = ques.mayUser(trans, trans.user(), perm, Access.write);
- }
- return nsd;
- }
- });
- 
- switch(fd.status) {
- case OK:
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, perm.ns);
- if(nsr.notOKorIsEmpty()) {
- return Result.err(nsr);
- }
- 
- // Approval required: queue the deletion and report ACC_Future.
- Result<List<Identity>> rfc = func.createFuture(trans, fd.value, 
- perm.encode(), trans.user(),nsr.value.get(0),"D");
- if(rfc.isOK()) {
- return Result.err(Status.ACC_Future, "Perm Deletion [%s] is saved for future processing",perm.encode());
- } else { 
- return Result.err(rfc);
- }
- case Status.ACC_Now:
- return func.deletePerm(trans,perm,trans.forceRequested(), false);
- default:
- return Result.err(fd);
- } 
- } 
- \r
- @ApiDoc( \r
- method = DELETE,\r
- path = "/authz/perm/:name/:type/:action",\r
- params = {"type|string|true",\r
- "instance|string|true",\r
- "action|string|true"},\r
- expectedCode = 200,\r
- errorCodes = { 404,406 }, \r
- text = { "Delete the Permission referenced by :type :instance :action",\r
- "You cannot normally delete a permission which is still granted to roles,",\r
- "however the \"force\" property allows you to do just that. To do this: Add",\r
- "'force=true' as a query parameter",\r
- "<p>WARNING: Using force will ungrant this permission from all roles. Use with care.</p>"}\r
- )\r
- @Override\r
- public Result<Void> deletePerm(AuthzTrans trans, String type, String instance, String action) {\r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank("Type",type)\r
- .nullOrBlank("Instance",instance)\r
- .nullOrBlank("Action",action)\r
- .err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- Result<PermDAO.Data> pd = ques.permFrom(trans, type, instance, action);\r
- if(pd.isOK()) {\r
- return func.deletePerm(trans, pd.value, trans.forceRequested(), false);\r
- } else {\r
- return Result.err(pd);\r
- }\r
- }\r
-\r
-/***********************************\r
- * ROLE \r
- ***********************************/\r
- // Creates a Role in a Namespace.  Fails fast on duplicates, then routes the
- // change through mapper.future: OK → queued for approval ("C" Future
- // record), ACC_Now → create directly in the DAO.
- @ApiDoc(
- method = POST,
- path = "/authz/role",
- params = {},
- expectedCode = 201,
- errorCodes = {403,404,406,409},
- text = {
-
- "Roles are part of Namespaces",
- "Examples:",
- "<ul><li> org.osaaf - A Possible root Namespace for maintaining AAF</li>",
- "Roles do not include implied permissions for an App. Instead, they contain explicit Granted Permissions by any Namespace in AAF (See Permissions)",
- "Restrictions on Role Names:",
- "<ul><li>Must start with valid Namespace name, terminated by . (dot/period)</li>",
- "<li>Allowed Characters are a-zA-Z0-9._-</li>",
- "<li>role names are Case Sensitive</li></ul>",
- "The right questions to ask for defining and populating a Role in AAF, therefore, are:",
- "<ul><li>'What Job Function does this represent?'</li>",
- "<li>'Does this person perform this Job Function?'</li></ul>" }
- )
-
- @Override
- public Result<Void> createRole(final AuthzTrans trans, REQUEST from) {
- final Result<RoleDAO.Data> rd = mapper.role(trans, from);
- final Validator v = new Validator(trans);
- if(v.role(rd).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- final RoleDAO.Data role = rd.value;
- // Duplicate check before any Future/approval machinery runs.
- if(ques.roleDAO.read(trans, role.ns, role.name).isOKhasData()) {
- return Result.err(Status.ERR_ConflictAlreadyExists, "Role [" + role.fullName() + "] already exists");
- }
-
- Result<FutureDAO.Data> fd = mapper.future(trans,RoleDAO.TABLE,from,role,false,
- new Mapper.Memo() {
- @Override
- public String get() {
- return "Create Role [" + 
- rd.value.fullName() + 
- ']';
- }
- },
- new MayChange() {
- // Cached so repeated mayChange() calls hit the Question only once.
- private Result<NsDAO.Data> nsd;
- @Override
- public Result<?> mayChange() {
- if(nsd==null) {
- nsd = ques.mayUser(trans, trans.user(), role, Access.write);
- }
- return nsd;
- }
- });
- 
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, rd.value.ns);
- if(nsr.notOKorIsEmpty()) {
- return Result.err(nsr);
- }
-
- switch(fd.status) {
- case OK:
- // Approval required: queue a "C" (create) Future and report ACC_Future.
- Result<List<Identity>> rfc = func.createFuture(trans, fd.value, 
- role.encode(), trans.user(),nsr.value.get(0),"C");
- if(rfc.isOK()) {
- return Result.err(Status.ACC_Future, "Role [%s.%s] is saved for future processing",
- rd.value.ns,
- rd.value.name);
- } else { 
- return Result.err(rfc);
- }
- case Status.ACC_Now:
- Result<RoleDAO.Data> rdr = ques.roleDAO.create(trans, role);
- if(rdr.isOK()) {
- return Result.ok();
- } else {
- return Result.err(rdr);
- }
- default:
- return Result.err(fd);
- }
- }
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.AuthzService#getRolesByName(org.onap.aaf.authz.env.AuthzTrans, java.lang.String)\r
- */\r
- // Lists Roles matching :role.  The caller must have read access on the
- // decoded role before the lookup is even attempted; the mapper then filters
- // individual results again.
- @ApiDoc(
- method = GET,
- path = "/authz/roles/:role",
- params = {"role|string|true"}, 
- expectedCode = 200,
- errorCodes = {404,406},
- text = { "List Roles that match :role",
- "Note: You must have permission to see any given role"
- }
- )
- @Override
- public Result<ROLES> getRolesByName(AuthzTrans trans, String role) {
- final Validator v = new Validator();
- if(v.nullOrBlank("Role", role).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- 
- // Determine if User can ask this question
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, ques, role);
- if(rrdd.isOKhasData()) {
- Result<NsDAO.Data> r;
- if((r = ques.mayUser(trans, trans.user(), rrdd.value, Access.read)).notOK()) {
- return Result.err(r);
- }
- } else {
- return Result.err(rrdd);
- }
- 
- // Look up data
- Result<List<RoleDAO.Data>> rlrd = ques.getRolesByName(trans, role);
- if(rlrd.isOK()) {
- // Note: Mapper will restrict what can be viewed
- ROLES roles = mapper.newInstance(API.ROLES);
- return mapper.roles(trans, rlrd.value, roles, true);
- } else {
- return Result.err(rlrd);
- }
- }
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.AuthzService#getRolesByUser(org.onap.aaf.authz.env.AuthzTrans, java.lang.String)\r
- */\r
- // Lists Roles held by :name (full identity).  Iterates the user's UserRole
- // rows and resolves each to its Role; mapper filtering is skipped when the
- // caller queries their own identity.
- @ApiDoc(
- method = GET,
- path = "/authz/roles/user/:name",
- params = {"name|string|true"},
- expectedCode = 200,
- errorCodes = {404,406},
- text = { "List all Roles that match user :name",
- "'user' must be expressed as full identity (ex: id@full.domain.com)",
- "Note: You must have permission to see any given role"
- }
- )
-
- @Override
- public Result<ROLES> getRolesByUser(AuthzTrans trans, String user) {
- final Validator v = new Validator();
- if(v.nullOrBlank("User", user).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
-
- ROLES roles = mapper.newInstance(API.ROLES);
- // Get list of roles per user, then add to Roles as we go
- Result<List<RoleDAO.Data>> rlrd;
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readByUser(trans, user);
- if(rlurd.isOKhasData()) {
- for(UserRoleDAO.Data urd : rlurd.value ) {
- rlrd = ques.roleDAO.read(trans, urd.ns,urd.rname);
- // Note: Mapper will restrict what can be viewed
- // if user is the same as that which is looked up, no filtering is required
- if(rlrd.isOKhasData()) {
- mapper.roles(trans, rlrd.value,roles, !user.equals(trans.user()));
- }
- }
- }
- // Lookup failures are silently reduced to an empty/partial ROLES payload.
- return Result.ok(roles);
- }
-\r
- /*\r
- * (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.AuthzService#getRolesByNS(org.onap.aaf.authz.env.AuthzTrans, java.lang.String)\r
- */\r
- // Lists all Roles in Namespace :ns.  Access is checked once on the NS, so
- // the mapper is told NOT to filter (last arg false).
- @ApiDoc(
- method = GET,
- path = "/authz/roles/ns/:ns",
- params = {"ns|string|true"},
- expectedCode = 200,
- errorCodes = {404,406},
- text = { "List all Roles for the Namespace :ns", 
- "Note: You must have permission to see any given role"
- }
- )
-
- @Override
- public Result<ROLES> getRolesByNS(AuthzTrans trans, String ns) {
- final Validator v = new Validator();
- if(v.nullOrBlank("NS", ns).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- 
- // check if user is allowed to view NS
- Result<NsDAO.Data> rnsd = ques.deriveNs(trans, ns); 
- if(rnsd.notOK()) {
- return Result.err(rnsd); 
- }
- rnsd = ques.mayUser(trans, trans.user(), rnsd.value, Access.read);
- if(rnsd.notOK()) {
- return Result.err(rnsd); 
- }
-
- // Timing scope for metrics/tracing on the mapping step.
- TimeTaken tt = trans.start("MAP Roles by NS to Roles", Env.SUB);
- try {
- ROLES roles = mapper.newInstance(API.ROLES);
- // Get list of roles per user, then add to Roles as we go
- Result<List<RoleDAO.Data>> rlrd = ques.roleDAO.readNS(trans, ns);
- if(rlrd.isOK()) {
- if(!rlrd.isEmpty()) {
- // Note: Mapper doesn't need to restrict what can be viewed, because we did it already.
- mapper.roles(trans,rlrd.value,roles,false);
- }
- return Result.ok(roles);
- } else {
- return Result.err(rlrd);
- }
- } finally {
- tt.done();
- }
- }
-\r
- /*\r
- * (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.AuthzService#getRolesByNS(org.onap.aaf.authz.env.AuthzTrans, java.lang.String)\r
- */\r
- // Lists Roles by bare name across all Namespaces.  No up-front NS access
- // check is possible here, so the mapper filters per result (last arg true).
- @ApiDoc(
- method = GET,
- path = "/authz/roles/name/:name",
- params = {"name|string|true"},
- expectedCode = 200,
- errorCodes = {404,406},
- text = { "List all Roles for only the Name of Role (without Namespace)", 
- "Note: You must have permission to see any given role"
- }
- )
- @Override
- public Result<ROLES> getRolesByNameOnly(AuthzTrans trans, String name) {
- final Validator v = new Validator();
- if(v.nullOrBlank("Name", name).err()) {
- return Result.err(Status.ERR_BadData,v.errs());
- }
- 
- // User Mapper to make sure user is allowed to view NS
-
- TimeTaken tt = trans.start("MAP Roles by Name to Roles", Env.SUB);
- try {
- ROLES roles = mapper.newInstance(API.ROLES);
- // Get list of roles per user, then add to Roles as we go
- Result<List<RoleDAO.Data>> rlrd = ques.roleDAO.readName(trans, name);
- if(rlrd.isOK()) {
- if(!rlrd.isEmpty()) {
- // Note: Mapper will restrict what can be viewed
- mapper.roles(trans,rlrd.value,roles,true);
- }
- return Result.ok(roles);
- } else {
- return Result.err(rlrd);
- }
- } finally {
- tt.done();
- }
- }
-\r
- @ApiDoc(\r
- method = GET,\r
- path = "/authz/roles/perm/:type/:instance/:action",\r
- params = {"type|string|true",\r
- "instance|string|true",\r
- "action|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {404,406},\r
- text = { "Find all Roles containing the given Permission." +\r
- "Permission consists of:",\r
- "<ul><li>type - a Namespace qualified identifier specifying what kind of resource "\r
- + "is being protected</li>",\r
- "<li>instance - a key, possibly multi-dimensional, that identifies a specific "\r
- + " instance of the type</li>",\r
- "<li>action - what kind of action is allowed</li></ul>",\r
- "Notes: instance and action can be an *",\r
- " You must have permission to see any given role"\r
- }\r
- )\r
-\r
- @Override\r
- public Result<ROLES> getRolesByPerm(AuthzTrans trans, String type, String instance, String action) {\r
- final Validator v = new Validator(trans);\r
- if(v.permType(type,null)\r
- .permInstance(instance)\r
- .permAction(action)\r
- .err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- TimeTaken tt = trans.start("Map Perm Roles Roles", Env.SUB);\r
- try {\r
- ROLES roles = mapper.newInstance(API.ROLES);\r
- // Get list of roles per user, then add to Roles as we go\r
- Result<NsSplit> nsSplit = ques.deriveNsSplit(trans, type);\r
- if(nsSplit.isOK()) {\r
- PermDAO.Data pdd = new PermDAO.Data(nsSplit.value, instance, action);\r
- Result<?> res;\r
- if((res=ques.mayUser(trans, trans.user(), pdd, Question.Access.read)).notOK()) {\r
- return Result.err(res);\r
- }\r
- \r
- Result<List<PermDAO.Data>> pdlr = ques.permDAO.read(trans, pdd);\r
- if(pdlr.isOK())for(PermDAO.Data pd : pdlr.value) {\r
- Result<List<RoleDAO.Data>> rlrd;\r
- for(String r : pd.roles) {\r
- Result<String[]> rs = RoleDAO.Data.decodeToArray(trans, ques, r);\r
- if(rs.isOK()) {\r
- rlrd = ques.roleDAO.read(trans, rs.value[0],rs.value[1]);\r
- // Note: Mapper will restrict what can be viewed\r
- if(rlrd.isOKhasData()) {\r
- mapper.roles(trans,rlrd.value,roles,true);\r
- }\r
- }\r
- }\r
- }\r
- }\r
- return Result.ok(roles);\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @ApiDoc(\r
- method = PUT,\r
- path = "/authz/role",\r
- params = {},\r
- expectedCode = 200,\r
- errorCodes = {404,406},\r
- text = { "Add Description Data to a Role" }\r
- )\r
-\r
- @Override\r
- public Result<Void> updateRoleDescription(AuthzTrans trans, REQUEST from) {\r
- final Result<RoleDAO.Data> rd = mapper.role(trans, from);\r
- final Validator v = new Validator(trans);\r
- if(v.role(rd).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- } {\r
- if(v.nullOrBlank("description", rd.value.description).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- }\r
- final RoleDAO.Data role = rd.value;\r
- if(ques.roleDAO.read(trans, role.ns, role.name).notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_NotFound, "Role [" + role.fullName() + "] does not exist");\r
- }\r
-\r
- if (ques.mayUser(trans, trans.user(), role, Access.write).notOK()) {\r
- return Result.err(Status.ERR_Denied, "You do not have approval to change " + role.fullName());\r
- }\r
-\r
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, rd.value.ns);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(nsr);\r
- }\r
-\r
- Result<Void> rdr = ques.roleDAO.addDescription(trans, role.ns, role.name, role.description);\r
- if(rdr.isOK()) {\r
- return Result.ok();\r
- } else {\r
- return Result.err(rdr);\r
- }\r
-\r
- }\r
- \r
- @ApiDoc(\r
- method = POST,\r
- path = "/authz/role/perm",\r
- params = {},\r
- expectedCode = 201,\r
- errorCodes = {403,404,406,409},\r
- text = { "Grant a Permission to a Role",\r
- "Permission consists of:", \r
- "<ul><li>type - a Namespace qualified identifier specifying what kind of resource "\r
- + "is being protected</li>",\r
- "<li>instance - a key, possibly multi-dimensional, that identifies a specific "\r
- + " instance of the type</li>",\r
- "<li>action - what kind of action is allowed</li></ul>",\r
- "Note: instance and action can be an *",\r
- "Note: Using the \"force\" property will create the Permission, if it doesn't exist AND the requesting " +\r
- " ID is allowed to create. It will then grant",\r
- " the permission to the role in one step. To do this: add 'force=true' as a query parameter."\r
- }\r
- )\r
-\r
- @Override\r
- public Result<Void> addPermToRole(final AuthzTrans trans, REQUEST rreq) {\r
- // Translate Request into Perm and Role Objects\r
- final Result<PermDAO.Data> rpd = mapper.permFromRPRequest(trans, rreq);\r
- if(rpd.notOKorIsEmpty()) {\r
- return Result.err(rpd);\r
- }\r
- final Result<RoleDAO.Data> rrd = mapper.roleFromRPRequest(trans, rreq);\r
- if(rrd.notOKorIsEmpty()) {\r
- return Result.err(rrd);\r
- }\r
- \r
- // Validate Role and Perm values\r
- final Validator v = new Validator(trans);\r
- if(v.perm(rpd.value)\r
- .role(rrd.value)\r
- .err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- Result<List<RoleDAO.Data>> rlrd = ques.roleDAO.read(trans, rrd.value.ns, rrd.value.name);\r
- if(rlrd.notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_RoleNotFound, "Role [%s] does not exist", rrd.value.fullName());\r
- }\r
- \r
- // Check Status of Data in DB (does it exist)\r
- Result<List<PermDAO.Data>> rlpd = ques.permDAO.read(trans, rpd.value.ns, \r
- rpd.value.type, rpd.value.instance, rpd.value.action);\r
- PermDAO.Data createPerm = null; // if not null, create first\r
- if(rlpd.notOKorIsEmpty()) { // Permission doesn't exist\r
- if(trans.forceRequested()) {\r
- // Remove roles from perm data object so we just create the perm here\r
- createPerm = rpd.value;\r
- createPerm.roles.clear();\r
- } else {\r
- return Result.err(Status.ERR_PermissionNotFound,"Permission [%s.%s|%s|%s] does not exist", \r
- rpd.value.ns,rpd.value.type,rpd.value.instance,rpd.value.action);\r
- }\r
- } else {\r
- if (rlpd.value.get(0).roles(false).contains(rrd.value.encode())) {\r
- return Result.err(Status.ERR_ConflictAlreadyExists,\r
- "Permission [%s.%s|%s|%s] already granted to Role [%s.%s]",\r
- rpd.value.ns,rpd.value.type,rpd.value.instance,rpd.value.action,\r
- rrd.value.ns,rrd.value.name\r
- );\r
- }\r
- }\r
-\r
- \r
- Result<FutureDAO.Data> fd = mapper.future(trans, PermDAO.TABLE, rreq, rpd.value,true, // Allow grants to create Approvals\r
- new Mapper.Memo() {\r
- @Override\r
- public String get() {\r
- return "Grant Permission [" + rpd.value.fullPerm() + ']' +\r
- " to Role [" + rrd.value.fullName() + "]";\r
- }\r
- },\r
- new MayChange() {\r
- private Result<NsDAO.Data> nsd;\r
- @Override\r
- public Result<?> mayChange() {\r
- if(nsd==null) {\r
- nsd = ques.mayUser(trans, trans.user(), rpd.value, Access.write);\r
- }\r
- return nsd;\r
- }\r
- });\r
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, rpd.value.ns);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(nsr);\r
- }\r
- switch(fd.status) {\r
- case OK:\r
- Result<List<Identity>> rfc = func.createFuture(trans,fd.value, \r
- rpd.value.fullPerm(),\r
- trans.user(),\r
- nsr.value.get(0),\r
- "G");\r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "Perm [%s.%s|%s|%s] is saved for future processing",\r
- rpd.value.ns,\r
- rpd.value.type,\r
- rpd.value.instance,\r
- rpd.value.action);\r
- } else { \r
- return Result.err(rfc);\r
- }\r
- case Status.ACC_Now:\r
- Result<Void> rv = null;\r
- if(createPerm!=null) {// has been validated for creating\r
- rv = func.createPerm(trans, createPerm, false);\r
- }\r
- if(rv==null || rv.isOK()) {\r
- rv = func.addPermToRole(trans, rrd.value, rpd.value, false);\r
- }\r
- return rv;\r
- default:\r
- return Result.err(fd);\r
- }\r
- \r
- }\r
-\r
- /**\r
- * Create a RoleDAO.Data\r
- * @param trans\r
- * @param roleFullName\r
- * @return\r
- */\r
- @ApiDoc(\r
- method = DELETE,\r
- path = "/authz/role/:role/perm",\r
- params = {"role|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {404,406},\r
- text = { "Ungrant a permission from Role :role" }\r
- )\r
-\r
- @Override\r
- public Result<Void> delPermFromRole(final AuthzTrans trans, REQUEST rreq) {\r
- final Result<PermDAO.Data> updt = mapper.permFromRPRequest(trans, rreq);\r
- if(updt.notOKorIsEmpty()) {\r
- return Result.err(updt);\r
- }\r
- final Result<RoleDAO.Data> rrd = mapper.roleFromRPRequest(trans, rreq);\r
- if(rrd.notOKorIsEmpty()) {\r
- return Result.err(rrd);\r
- }\r
- \r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank(updt.value)\r
- .nullOrBlank(rrd.value)\r
- .err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- Result<List<PermDAO.Data>> rlpd = ques.permDAO.read(trans, updt.value.ns, updt.value.type, \r
- updt.value.instance, updt.value.action);\r
- \r
- if(rlpd.notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_PermissionNotFound, \r
- "Permission [%s.%s|%s|%s] does not exist",\r
- updt.value.ns,updt.value.type,updt.value.instance,updt.value.action);\r
- }\r
- \r
- Result<FutureDAO.Data> fd = mapper.future(trans, PermDAO.TABLE, rreq, updt.value,true, // allow ungrants requests\r
- new Mapper.Memo() {\r
- @Override\r
- public String get() {\r
- return "Ungrant Permission [" + updt.value.fullPerm() + ']' +\r
- " from Role [" + rrd.value.fullName() + "]";\r
- }\r
- },\r
- new MayChange() {\r
- private Result<NsDAO.Data> nsd;\r
- @Override\r
- public Result<?> mayChange() {\r
- if(nsd==null) {\r
- nsd = ques.mayUser(trans, trans.user(), updt.value, Access.write);\r
- }\r
- return nsd;\r
- }\r
- });\r
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, updt.value.ns);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(nsr);\r
- }\r
- switch(fd.status) {\r
- case OK:\r
- Result<List<Identity>> rfc = func.createFuture(trans,fd.value, \r
- updt.value.fullPerm(),\r
- trans.user(),\r
- nsr.value.get(0),\r
- "UG"\r
- );\r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "Perm [%s.%s|%s|%s] is saved for future processing",\r
- updt.value.ns,\r
- updt.value.type,\r
- updt.value.instance,\r
- updt.value.action);\r
- } else {\r
- return Result.err(rfc);\r
- }\r
- case Status.ACC_Now:\r
- return func.delPermFromRole(trans, rrd.value, updt.value, false);\r
- default:\r
- return Result.err(fd);\r
- }\r
- }\r
- \r
- @ApiDoc(\r
- method = DELETE,\r
- path = "/authz/role/:role",\r
- params = {"role|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {404,406},\r
- text = { "Delete the Role named :role"}\r
- )\r
-\r
- @Override\r
- public Result<Void> deleteRole(AuthzTrans trans, String role) {\r
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,ques,role);\r
- if(rrdd.isOKhasData()) {\r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank(rrdd.value).err()) { \r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- return func.deleteRole(trans, rrdd.value, false, false);\r
- } else {\r
- return Result.err(rrdd);\r
- }\r
- }\r
-\r
- @ApiDoc(\r
- method = DELETE,\r
- path = "/authz/role",\r
- params = {},\r
- expectedCode = 200,\r
- errorCodes = { 404,406 },\r
- text = { "Delete the Role referenced by RoleKey",\r
- "You cannot normally delete a role which still has permissions granted or users assigned to it,",\r
- "however the \"force\" property allows you to do just that. To do this: Add 'force=true'",\r
- "as a query parameter.",\r
- "<p>WARNING: Using force will remove all users and permission from this role. Use with care.</p>"}\r
- )\r
-\r
- @Override\r
- public Result<Void> deleteRole(final AuthzTrans trans, REQUEST from) {\r
- final Result<RoleDAO.Data> rd = mapper.role(trans, from);\r
- final Validator v = new Validator(trans);\r
- if(rd==null) {\r
- return Result.err(Status.ERR_BadData,"Request does not contain Role");\r
- }\r
- if(v.nullOrBlank(rd.value).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- final RoleDAO.Data role = rd.value;\r
- if(ques.roleDAO.read(trans, role).notOKorIsEmpty() && !trans.forceRequested()) {\r
- return Result.err(Status.ERR_RoleNotFound, "Role [" + role.fullName() + "] does not exist");\r
- }\r
-\r
- Result<FutureDAO.Data> fd = mapper.future(trans,RoleDAO.TABLE,from,role,false,\r
- new Mapper.Memo() {\r
- @Override\r
- public String get() {\r
- return "Delete Role [" + role.fullName() + ']' \r
- + " and all attached user roles";\r
- }\r
- },\r
- new MayChange() {\r
- private Result<NsDAO.Data> nsd;\r
- @Override\r
- public Result<?> mayChange() {\r
- if(nsd==null) {\r
- nsd = ques.mayUser(trans, trans.user(), role, Access.write);\r
- }\r
- return nsd;\r
- }\r
- });\r
- \r
- switch(fd.status) {\r
- case OK:\r
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, rd.value.ns);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(nsr);\r
- }\r
- \r
- Result<List<Identity>> rfc = func.createFuture(trans, fd.value, \r
- role.encode(), trans.user(),nsr.value.get(0),"D");\r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "Role Deletion [%s.%s] is saved for future processing",\r
- rd.value.ns,\r
- rd.value.name);\r
- } else { \r
- return Result.err(rfc);\r
- }\r
- case Status.ACC_Now:\r
- return func.deleteRole(trans,role,trans.forceRequested(), true /*preapproved*/);\r
- default:\r
- return Result.err(fd);\r
- }\r
-\r
- }\r
-\r
-/***********************************\r
- * CRED \r
- ***********************************/\r
- private class MayCreateCred implements MayChange {\r
- private Result<NsDAO.Data> nsd;\r
- private AuthzTrans trans;\r
- private CredDAO.Data cred;\r
- private Executor exec;\r
- \r
- public MayCreateCred(AuthzTrans trans, CredDAO.Data cred, Executor exec) {\r
- this.trans = trans;\r
- this.cred = cred;\r
- this.exec = exec;\r
- }\r
-\r
- @Override\r
- public Result<?> mayChange() {\r
- if(nsd==null) {\r
- nsd = ques.validNSOfDomain(trans, cred.id);\r
- }\r
- // is Ns of CredID valid?\r
- if(nsd.isOK()) {\r
- try {\r
- // Check Org Policy\r
- if(trans.org().validate(trans,Policy.CREATE_MECHID, exec, cred.id)==null) {\r
- return Result.ok(); \r
- } else {\r
- Result<?> rmc = ques.mayUser(trans, trans.user(), nsd.value, Access.write);\r
- if(rmc.isOKhasData()) {\r
- return rmc;\r
- }\r
- }\r
- } catch (Exception e) {\r
- trans.warn().log(e);\r
- }\r
- } else {\r
- trans.warn().log(nsd.errorString());\r
- }\r
- return Result.err(Status.ERR_Denied,"%s is not allowed to create %s in %s",trans.user(),cred.id,cred.ns);\r
- }\r
- }\r
-\r
- private class MayChangeCred implements MayChange {\r
- \r
- private Result<NsDAO.Data> nsd;\r
- private AuthzTrans trans;\r
- private CredDAO.Data cred;\r
- public MayChangeCred(AuthzTrans trans, CredDAO.Data cred) {\r
- this.trans = trans;\r
- this.cred = cred;\r
- }\r
-\r
- @Override\r
- public Result<?> mayChange() {\r
- // User can change himself (but not create)\r
- if(trans.user().equals(cred.id)) {\r
- return Result.ok();\r
- }\r
- if(nsd==null) {\r
- nsd = ques.validNSOfDomain(trans, cred.id);\r
- }\r
- // Get the Namespace\r
- if(nsd.isOK()) {\r
- if(ques.mayUser(trans, trans.user(), nsd.value,Access.write).isOK()) {\r
- return Result.ok();\r
- }\r
- String user[] = Split.split('.',trans.user());\r
- if(user.length>2) {\r
- String company = user[user.length-1] + '.' + user[user.length-2];\r
- if(ques.isGranted(trans, trans.user(), Define.ROOT_NS,"password",company,"reset")) {\r
- return Result.ok();\r
- }\r
- }\r
- }\r
- return Result.err(Status.ERR_Denied,"%s is not allowed to change %s in %s",trans.user(),cred.id,cred.ns);\r
- }\r
-\r
- }\r
-\r
- private final long DAY_IN_MILLIS = 24*3600*1000;\r
- \r
- @ApiDoc( \r
- method = POST, \r
- path = "/authn/cred",\r
- params = {},\r
- expectedCode = 201,\r
- errorCodes = {403,404,406,409}, \r
- text = { "A credential consists of:",\r
- "<ul><li>id - the ID to create within AAF. The domain is in reverse",\r
- "order of Namespace (i.e. Users of Namespace com.att.myapp would be",\r
- "AB1234@myapp.att.com</li>",\r
- "<li>password - Company Policy Compliant Password</li></ul>",\r
- "Note: AAF does support multiple credentials with the same ID.",\r
- "Check with your organization if you have this implemented."\r
- }\r
- )\r
- @Override\r
- public Result<Void> createUserCred(final AuthzTrans trans, REQUEST from) {\r
- final String cmdDescription = ("Create User Credential");\r
- TimeTaken tt = trans.start(cmdDescription, Env.SUB);\r
- \r
- try {\r
- Result<CredDAO.Data> rcred = mapper.cred(trans, from, true);\r
- if(rcred.isOKhasData()) {\r
- rcred = ques.userCredSetup(trans, rcred.value);\r
- \r
- final Validator v = new Validator();\r
- \r
- if(v.cred(trans.org(),rcred,true).err()) { // Note: Creates have stricter Validations \r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
-\r
- // 2016-4 JG, New Behavior - If MechID is not registered with Org, deny creation\r
- Identity mechID = null;\r
- Organization org = trans.org();\r
- try {\r
- mechID = org.getIdentity(trans, rcred.value.id);\r
- } catch (Exception e1) {\r
- trans.error().log(e1,rcred.value.id,"cannot be validated at this time");\r
- }\r
- if(mechID==null || !mechID.isFound()) { \r
- return Result.err(Status.ERR_Policy,"MechIDs must be registered with %s before provisioning in AAF",org.getName());\r
- }\r
-\r
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, rcred.value.ns);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_NsNotFound,"Cannot provision %s on non-existent Namespace %s",mechID.id(),rcred.value.ns);\r
- }\r
-\r
- boolean firstID = false;\r
- MayChange mc;\r
- \r
- CassExecutor exec = new CassExecutor(trans, func);\r
- Result<List<CredDAO.Data>> rlcd = ques.credDAO.readID(trans, rcred.value.id);\r
- if (rlcd.isOKhasData()) {\r
- if (!org.canHaveMultipleCreds(rcred.value.id)) {\r
- return Result.err(Status.ERR_ConflictAlreadyExists, "Credential exists");\r
- }\r
- for (CredDAO.Data curr : rlcd.value) {\r
- if (Chrono.dateOnlyStamp(curr.expires).equals(Chrono.dateOnlyStamp(rcred.value.expires)) && curr.type==rcred.value.type) {\r
- return Result.err(Status.ERR_ConflictAlreadyExists, "Credential with same Expiration Date exists, use 'reset'");\r
- }\r
- } \r
- } else {\r
- try {\r
- // 2016-04-12 JG If Caller is the Sponsor and is also an Owner of NS, allow without special Perm\r
- String theMechID = rcred.value.id;\r
- Boolean otherMechIDs = false;\r
- // find out if this is the only mechID. other MechIDs mean special handling (not automated)\r
- for(CredDAO.Data cd : ques.credDAO.readNS(trans,nsr.value.get(0).name).value) {\r
- if(!cd.id.equals(theMechID)) {\r
- otherMechIDs = true;\r
- break;\r
- }\r
- }\r
- String reason;\r
- // We can say "ID does not exist" here\r
- if((reason=org.validate(trans, Policy.CREATE_MECHID, exec, theMechID,trans.user(),otherMechIDs.toString()))!=null) {\r
- return Result.err(Status.ERR_Denied, reason); \r
- }\r
- firstID=true;\r
- } catch (Exception e) {\r
- return Result.err(e);\r
- }\r
- }\r
- \r
- mc = new MayCreateCred(trans, rcred.value, exec);\r
- \r
- final CredDAO.Data cdd = rcred.value;\r
- Result<FutureDAO.Data> fd = mapper.future(trans,CredDAO.TABLE,from, rcred.value,false, // may want to enable in future.\r
- new Mapper.Memo() {\r
- @Override\r
- public String get() {\r
- return cmdDescription + " [" + \r
- cdd.id + '|' \r
- + cdd.type + '|' \r
- + cdd.expires + ']';\r
- }\r
- },\r
- mc);\r
- \r
- switch(fd.status) {\r
- case OK:\r
- Result<List<Identity>> rfc = func.createFuture(trans, fd.value, \r
- rcred.value.id + '|' + rcred.value.type.toString() + '|' + rcred.value.expires,\r
- trans.user(), nsr.value.get(0), "C");\r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "Credential Request [%s|%s|%s] is saved for future processing",\r
- rcred.value.id,\r
- Integer.toString(rcred.value.type),\r
- rcred.value.expires.toString());\r
- } else { \r
- return Result.err(rfc);\r
- }\r
- case Status.ACC_Now:\r
- try {\r
- if(firstID) {\r
- // && !nsr.value.get(0).isAdmin(trans.getUserPrincipal().getName())) {\r
- Result<List<String>> admins = func.getAdmins(trans, nsr.value.get(0).name, false);\r
- // OK, it's a first ID, and not by NS Admin, so let's set TempPassword length\r
- // Note, we only do this on First time, because of possibility of \r
- // prematurely expiring a production id\r
- if(admins.isOKhasData() && !admins.value.contains(trans.user())) {\r
- rcred.value.expires = org.expiration(null, Expiration.TempPassword).getTime();\r
- }\r
- }\r
- } catch (Exception e) {\r
- trans.error().log(e, "While setting expiration to TempPassword");\r
- }\r
- Result<?>udr = ques.credDAO.create(trans, rcred.value);\r
- if(udr.isOK()) {\r
- return Result.ok();\r
- }\r
- return Result.err(udr);\r
- default:\r
- return Result.err(fd);\r
- }\r
-\r
- } else {\r
- return Result.err(rcred);\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @ApiDoc( \r
- method = GET, \r
- path = "/authn/creds/ns/:ns",\r
- params = {"ns|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {403,404,406}, \r
- text = { "Return all IDs in Namespace :ns"\r
- }\r
- )\r
- @Override\r
- public Result<USERS> getCredsByNS(AuthzTrans trans, String ns) {\r
- final Validator v = new Validator();\r
- if(v.ns(ns).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- // check if user is allowed to view NS\r
- Result<NsDAO.Data> rnd = ques.deriveNs(trans,ns);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd); \r
- }\r
- rnd = ques.mayUser(trans, trans.user(), rnd.value, Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd); \r
- }\r
- \r
- TimeTaken tt = trans.start("MAP Creds by NS to Creds", Env.SUB);\r
- try { \r
- USERS users = mapper.newInstance(API.USERS);\r
- Result<List<CredDAO.Data>> rlcd = ques.credDAO.readNS(trans, ns);\r
- \r
- if(rlcd.isOK()) {\r
- if(!rlcd.isEmpty()) {\r
- return mapper.cred(rlcd.value, users);\r
- }\r
- return Result.ok(users); \r
- } else {\r
- return Result.err(rlcd);\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- }\r
-\r
- @ApiDoc( \r
- method = GET, \r
- path = "/authn/creds/id/:ns",\r
- params = {"id|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {403,404,406}, \r
- text = { "Return all IDs in for ID"\r
- ,"(because IDs are multiple, due to multiple Expiration Dates)"\r
- }\r
- )\r
- @Override\r
- public Result<USERS> getCredsByID(AuthzTrans trans, String id) {\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("ID",id).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- String ns = Question.domain2ns(id);\r
- // check if user is allowed to view NS\r
- Result<NsDAO.Data> rnd = ques.deriveNs(trans,ns);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd); \r
- }\r
- rnd = ques.mayUser(trans, trans.user(), rnd.value, Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd); \r
- }\r
- \r
- TimeTaken tt = trans.start("MAP Creds by ID to Creds", Env.SUB);\r
- try { \r
- USERS users = mapper.newInstance(API.USERS);\r
- Result<List<CredDAO.Data>> rlcd = ques.credDAO.readID(trans, id);\r
- \r
- if(rlcd.isOK()) {\r
- if(!rlcd.isEmpty()) {\r
- return mapper.cred(rlcd.value, users);\r
- }\r
- return Result.ok(users); \r
- } else {\r
- return Result.err(rlcd);\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- }\r
-\r
- @ApiDoc( \r
- method = GET, \r
- path = "/authn/certs/id/:id",\r
- params = {"id|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {403,404,406}, \r
- text = { "Return Cert Info for ID"\r
- }\r
- )\r
- @Override\r
- public Result<CERTS> getCertInfoByID(AuthzTrans trans, HttpServletRequest req, String id) {\r
- TimeTaken tt = trans.start("Get Cert Info by ID", Env.SUB);\r
- try { \r
- CERTS certs = mapper.newInstance(API.CERTS);\r
- Result<List<CertDAO.Data>> rlcd = ques.certDAO.readID(trans, id);\r
- \r
- if(rlcd.isOK()) {\r
- if(!rlcd.isEmpty()) {\r
- return mapper.cert(rlcd.value, certs);\r
- }\r
- return Result.ok(certs); \r
- } else { \r
- return Result.err(rlcd);\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
-\r
- }\r
-\r
- @ApiDoc( \r
- method = PUT, \r
- path = "/authn/cred",\r
- params = {},\r
- expectedCode = 200,\r
- errorCodes = {300,403,404,406}, \r
- text = { "Reset a Credential Password. If multiple credentials exist for this",\r
- "ID, you will need to specify which entry you are resetting in the",\r
- "CredRequest object"\r
- }\r
- )\r
- @Override\r
- public Result<Void> changeUserCred(final AuthzTrans trans, REQUEST from) {\r
- final String cmdDescription = "Update User Credential";\r
- TimeTaken tt = trans.start(cmdDescription, Env.SUB);\r
- try {\r
- Result<CredDAO.Data> rcred = mapper.cred(trans, from, true);\r
- if(rcred.isOKhasData()) {\r
- rcred = ques.userCredSetup(trans, rcred.value);\r
- \r
- final Validator v = new Validator();\r
- \r
- if(v.cred(trans.org(),rcred,false).err()) {// Note: Creates have stricter Validations \r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- Result<List<CredDAO.Data>> rlcd = ques.credDAO.readID(trans, rcred.value.id);\r
- if(rlcd.notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_UserNotFound, "Credential does not exist");\r
- } \r
- \r
- MayChange mc = new MayChangeCred(trans, rcred.value);\r
- Result<?> rmc = mc.mayChange(); \r
- if (rmc.notOK()) {\r
- return Result.err(rmc);\r
- }\r
- \r
- Result<Integer> ri = selectEntryIfMultiple((CredRequest)from, rlcd.value);\r
- if(ri.notOK()) {\r
- return Result.err(ri);\r
- }\r
- int entry = ri.value;\r
- \r
- \r
- final CredDAO.Data cred = rcred.value;\r
- \r
- Result<FutureDAO.Data> fd = mapper.future(trans,CredDAO.TABLE,from, rcred.value,false,\r
- new Mapper.Memo() {\r
- @Override\r
- public String get() {\r
- return cmdDescription + " [" + \r
- cred.id + '|' \r
- + cred.type + '|' \r
- + cred.expires + ']';\r
- }\r
- },\r
- mc);\r
- \r
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, rcred.value.ns);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(nsr);\r
- }\r
- \r
- switch(fd.status) {\r
- case OK:\r
- Result<List<Identity>> rfc = func.createFuture(trans, fd.value, \r
- rcred.value.id + '|' + rcred.value.type.toString() + '|' + rcred.value.expires,\r
- trans.user(), nsr.value.get(0), "U");\r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "Credential Request [%s|%s|%s]",\r
- rcred.value.id,\r
- Integer.toString(rcred.value.type),\r
- rcred.value.expires.toString());\r
- } else { \r
- return Result.err(rfc);\r
- }\r
- case Status.ACC_Now:\r
- Result<?>udr = null;\r
- // If we are Resetting Password on behalf of someone else (am not the Admin)\r
- // use TempPassword Expiration time.\r
- Expiration exp;\r
- if(ques.isAdmin(trans, trans.user(), nsr.value.get(0).name)) {\r
- exp = Expiration.Password;\r
- } else {\r
- exp = Expiration.TempPassword;\r
- }\r
- \r
- Organization org = trans.org();\r
- // If user resets password in same day, we will have a primary key conflict, so subtract 1 day\r
- if (rlcd.value.get(entry).expires.equals(rcred.value.expires) \r
- && rlcd.value.get(entry).type==rcred.value.type) {\r
- GregorianCalendar gc = org.expiration(null, exp,rcred.value.id);\r
- gc = Chrono.firstMomentOfDay(gc);\r
- gc.set(GregorianCalendar.HOUR_OF_DAY, org.startOfDay()); \r
- rcred.value.expires = new Date(gc.getTimeInMillis() - DAY_IN_MILLIS);\r
- } else {\r
- rcred.value.expires = org.expiration(null,exp).getTime();\r
- }\r
- \r
- udr = ques.credDAO.create(trans, rcred.value);\r
- if(udr.isOK()) {\r
- udr = ques.credDAO.delete(trans, rlcd.value.get(entry),false);\r
- }\r
- if (udr.isOK()) {\r
- return Result.ok();\r
- }\r
- \r
- return Result.err(udr);\r
- default:\r
- return Result.err(fd);\r
- }\r
- } else {\r
- return Result.err(rcred);\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- /*\r
- * Codify the way to get Either Choice Needed or actual Integer from Credit Request\r
- */\r
- private Result<Integer> selectEntryIfMultiple(final CredRequest cr, List<CredDAO.Data> lcd) {\r
- int entry = 0;\r
- if (lcd.size() > 1) {\r
- String inputOption = cr.getEntry();\r
- if (inputOption == null) {\r
- String message = selectCredFromList(lcd, false);\r
- String[] variables = buildVariables(lcd);\r
- return Result.err(Status.ERR_ChoiceNeeded, message, variables);\r
- } else {\r
- entry = Integer.parseInt(inputOption) - 1;\r
- }\r
- if (entry < 0 || entry >= lcd.size()) {\r
- return Result.err(Status.ERR_BadData, "User chose invalid credential selection");\r
- }\r
- }\r
- return Result.ok(entry);\r
- }\r
- \r
- @ApiDoc( \r
- method = PUT, \r
- path = "/authn/cred/:days",\r
- params = {"days|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {300,403,404,406}, \r
- text = { "Extend a Credential Expiration Date. The intention of this API is",\r
- "to avoid an outage in PROD due to a Credential expiring before it",\r
- "can be configured correctly. Measures are being put in place ",\r
- "so that this is not abused."\r
- }\r
- )\r
- @Override\r
- public Result<Void> extendUserCred(final AuthzTrans trans, REQUEST from, String days) {\r
- TimeTaken tt = trans.start("Extend User Credential", Env.SUB);\r
- try {\r
- Result<CredDAO.Data> cred = mapper.cred(trans, from, false);\r
- Organization org = trans.org();\r
- final Validator v = new Validator();\r
- if(v.notOK(cred).err() || \r
- v.nullOrBlank(cred.value.id, "Invalid ID").err() ||\r
- v.user(org,cred.value.id).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- try {\r
- String reason;\r
- if ((reason=org.validate(trans, Policy.MAY_EXTEND_CRED_EXPIRES, new CassExecutor(trans,func)))!=null) {\r
- return Result.err(Status.ERR_Policy,reason);\r
- }\r
- } catch (Exception e) {\r
- String msg;\r
- trans.error().log(e, msg="Could not contact Organization for User Validation");\r
- return Result.err(Status.ERR_Denied, msg);\r
- }\r
- \r
- // Get the list of Cred Entries\r
- Result<List<CredDAO.Data>> rlcd = ques.credDAO.readID(trans, cred.value.id);\r
- if(rlcd.notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_UserNotFound, "Credential does not exist");\r
- }\r
-\r
- //Need to do the "Pick Entry" mechanism\r
- Result<Integer> ri = selectEntryIfMultiple((CredRequest)from, rlcd.value);\r
- if(ri.notOK()) {\r
- return Result.err(ri);\r
- }\r
-\r
- CredDAO.Data found = rlcd.value.get(ri.value);\r
- CredDAO.Data cd = cred.value;\r
- // Copy over the cred\r
- cd.cred = found.cred;\r
- cd.type = found.type;\r
- cd.expires = org.expiration(null, Expiration.ExtendPassword,days).getTime();\r
- \r
- cred = ques.credDAO.create(trans, cd);\r
- if(cred.isOK()) {\r
- return Result.ok();\r
- }\r
- return Result.err(cred);\r
- } finally {\r
- tt.done();\r
- }\r
- } \r
-\r
- private String[] buildVariables(List<CredDAO.Data> value) {\r
- // ensure credentials are sorted so we can fully automate Cred regression test\r
- Collections.sort(value, new Comparator<CredDAO.Data>() {\r
- @Override\r
- public int compare(CredDAO.Data cred1, CredDAO.Data cred2) {\r
- return cred1.expires.compareTo(cred2.expires);\r
- } \r
- });\r
- String [] vars = new String[value.size()+1];\r
- vars[0]="Choice";\r
- for (int i = 0; i < value.size(); i++) {\r
- vars[i+1] = value.get(i).id + " " + value.get(i).type \r
- + " |" + value.get(i).expires;\r
- }\r
- return vars;\r
- }\r
- \r
- private String selectCredFromList(List<CredDAO.Data> value, boolean isDelete) {\r
- StringBuilder errMessage = new StringBuilder();\r
- String userPrompt = isDelete?"Select which cred to delete (set force=true to delete all):":"Select which cred to update:";\r
- int numSpaces = value.get(0).id.length() - "Id".length();\r
- \r
- errMessage.append(userPrompt + '\n');\r
- errMessage.append(" Id");\r
- for (int i = 0; i < numSpaces; i++) {\r
- errMessage.append(' ');\r
- }\r
- errMessage.append(" Type Expires" + '\n');\r
- for(int i=0;i<value.size();++i) {\r
- errMessage.append(" %s\n");\r
- }\r
- errMessage.append("Run same command again with chosen entry as last parameter");\r
- \r
- return errMessage.toString();\r
- \r
- }\r
-\r
- @ApiDoc( \r
- method = DELETE, \r
- path = "/authn/cred",\r
- params = {},\r
- expectedCode = 200,\r
- errorCodes = {300,403,404,406}, \r
- text = { "Delete a Credential. If multiple credentials exist for this",\r
- "ID, you will need to specify which entry you are deleting in the",\r
- "CredRequest object."\r
- }\r
- )\r
- @Override\r
- public Result<Void> deleteUserCred(AuthzTrans trans, REQUEST from) {\r
- final Result<CredDAO.Data> cred = mapper.cred(trans, from, false);\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("cred", cred.value.id).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- Result<List<CredDAO.Data>> rlcd = ques.credDAO.readID(trans, cred.value.id);\r
- if(rlcd.notOKorIsEmpty()) {\r
- // Empty Creds should have no user_roles.\r
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readByUser(trans, cred.value.id);\r
- if(rlurd.isOK()) {\r
- for(UserRoleDAO.Data data : rlurd.value) {\r
- ques.userRoleDAO.delete(trans, data, false);\r
- }\r
- }\r
- return Result.err(Status.ERR_UserNotFound, "Credential does not exist");\r
- }\r
- boolean isLastCred = rlcd.value.size()==1;\r
- \r
- MayChange mc = new MayChangeCred(trans,cred.value);\r
- Result<?> rmc = mc.mayChange(); \r
- if (rmc.notOK()) {\r
- return Result.err(rmc);\r
- }\r
- \r
- int entry = 0;\r
- if(!trans.forceRequested()) {\r
- if (rlcd.value.size() > 1) {\r
- CredRequest cr = (CredRequest)from;\r
- String inputOption = cr.getEntry();\r
- if (inputOption == null) {\r
- String message = selectCredFromList(rlcd.value, true);\r
- String[] variables = buildVariables(rlcd.value);\r
- return Result.err(Status.ERR_ChoiceNeeded, message, variables);\r
- } else {\r
- try {\r
- entry = Integer.parseInt(inputOption) - 1;\r
- } catch(NumberFormatException e) {\r
- return Result.err(Status.ERR_BadData, "User chose invalid credential selection");\r
- }\r
- }\r
- isLastCred = (entry==-1)?true:false;\r
- } else {\r
- isLastCred = true;\r
- }\r
- if (entry < -1 || entry >= rlcd.value.size()) {\r
- return Result.err(Status.ERR_BadData, "User chose invalid credential selection");\r
- }\r
- }\r
- \r
- Result<FutureDAO.Data> fd = mapper.future(trans,CredDAO.TABLE,from,cred.value,false, \r
- new Mapper.Memo() {\r
- @Override\r
- public String get() {\r
- return "Delete Credential [" + \r
- cred.value.id + \r
- ']';\r
- }\r
- },\r
- mc);\r
- \r
- Result<List<NsDAO.Data>> nsr = ques.nsDAO.read(trans, cred.value.ns);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(nsr);\r
- }\r
- \r
- switch(fd.status) {\r
- case OK:\r
- Result<List<Identity>> rfc = func.createFuture(trans, fd.value, cred.value.id,\r
- trans.user(), nsr.value.get(0),"D");\r
- \r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "Credential Delete [%s] is saved for future processing",cred.value.id);\r
- } else { \r
- return Result.err(rfc);\r
- }\r
- case Status.ACC_Now:\r
- Result<?>udr = null;\r
- if (!trans.forceRequested()) {\r
- if(entry<0 || entry >= rlcd.value.size()) {\r
- return Result.err(Status.ERR_BadData,"Invalid Choice [" + entry + "] chosen for Delete [%s] is saved for future processing",cred.value.id);\r
- }\r
- udr = ques.credDAO.delete(trans, rlcd.value.get(entry),false);\r
- } else {\r
- for (CredDAO.Data curr : rlcd.value) {\r
- udr = ques.credDAO.delete(trans, curr, false);\r
- if (udr.notOK()) {\r
- return Result.err(udr);\r
- }\r
- }\r
- }\r
- if(isLastCred) {\r
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readByUser(trans, cred.value.id);\r
- if(rlurd.isOK()) {\r
- for(UserRoleDAO.Data data : rlurd.value) {\r
- ques.userRoleDAO.delete(trans, data, false);\r
- }\r
- }\r
- }\r
- if (udr.isOK()) {\r
- return Result.ok();\r
- }\r
- return Result.err(udr);\r
- default:\r
- return Result.err(fd);\r
- }\r
- \r
- }\r
-\r
-\r
- @Override\r
- public Result<Date> doesCredentialMatch(AuthzTrans trans, REQUEST credReq) {\r
- TimeTaken tt = trans.start("Does Credential Match", Env.SUB);\r
- try {\r
- // Note: Mapper assigns RAW type\r
- Result<CredDAO.Data> data = mapper.cred(trans, credReq,false);\r
- if(data.notOKorIsEmpty()) {\r
- return Result.err(data);\r
- }\r
- CredDAO.Data cred = data.value; // of the Mapped Cred\r
- return ques.doesUserCredMatch(trans, cred.id, cred.cred.array());\r
-\r
- } catch (DAOException e) {\r
- trans.error().log(e,"Error looking up cred");\r
- return Result.err(Status.ERR_Denied,"Credential does not match");\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
-\r
- @ApiDoc( \r
- method = GET, \r
- path = "/authn/basicAuth",\r
- params = {},\r
- expectedCode = 200,\r
- errorCodes = { 403 }, \r
- text = { "Validate a Password using BasicAuth Base64 encoded Header. This HTTP/S call is intended as a fast"\r
- + " User/Password lookup for Security Frameworks, and responds 200 if it passes BasicAuth "\r
- + "security, and 403 if it does not." }\r
- )\r
- private void basicAuth() {\r
- // This is a place holder for Documentation. The real BasicAuth API does not call Service.\r
- }\r
- \r
- @ApiDoc( \r
- method = POST, \r
- path = "/authn/validate",\r
- params = {},\r
- expectedCode = 200,\r
- errorCodes = { 403 }, \r
- text = { "Validate a Credential given a Credential Structure. This is a more comprehensive validation, can "\r
- + "do more than BasicAuth as Credential types exp" }\r
- )\r
- @Override\r
- public Result<Date> validateBasicAuth(AuthzTrans trans, String basicAuth) {\r
- //TODO how to make sure people don't use this in browsers? Do we care?\r
- TimeTaken tt = trans.start("Validate Basic Auth", Env.SUB);\r
- try {\r
- BasicPrincipal bp = new BasicPrincipal(basicAuth,trans.org().getRealm());\r
- Result<Date> rq = ques.doesUserCredMatch(trans, bp.getName(), bp.getCred());\r
- // Note: Only want to log problem, don't want to send back to end user\r
- if(rq.isOK()) {\r
- return rq;\r
- } else {\r
- trans.audit().log(rq.errorString());\r
- }\r
- } catch (Exception e) {\r
- trans.warn().log(e);\r
- } finally {\r
- tt.done();\r
- }\r
- return Result.err(Status.ERR_Denied,"Bad Basic Auth");\r
- }\r
-\r
-/***********************************\r
- * USER-ROLE \r
- ***********************************/\r
- @ApiDoc( \r
- method = POST, \r
- path = "/authz/userRole",\r
- params = {},\r
- expectedCode = 201,\r
- errorCodes = {403,404,406,409}, \r
- text = { "Create a UserRole relationship (add User to Role)",\r
- "A UserRole is an object Representation of membership of a Role for limited time.",\r
- "If a shorter amount of time for Role ownership is required, use the 'End' field.",\r
- "** Note: Owners of Namespaces will be required to revalidate users in these roles ",\r
- "before Expirations expire. Namespace owners will be notified by email."\r
- }\r
- )\r
- @Override\r
- public Result<Void> createUserRole(final AuthzTrans trans, REQUEST from) {\r
- TimeTaken tt = trans.start("Create UserRole", Env.SUB);\r
- try {\r
- Result<UserRoleDAO.Data> urr = mapper.userRole(trans, from);\r
- if(urr.notOKorIsEmpty()) {\r
- return Result.err(urr);\r
- }\r
- final UserRoleDAO.Data userRole = urr.value;\r
- \r
- final Validator v = new Validator();\r
- if(v.user_role(userRole).err() ||\r
- v.user(trans.org(), userRole.user).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
-\r
- \r
- // Check if user can change first\r
- Result<FutureDAO.Data> fd = mapper.future(trans,UserRoleDAO.TABLE,from,urr.value,true, // may request Approvals\r
- new Mapper.Memo() {\r
- @Override\r
- public String get() {\r
- return "Add User [" + userRole.user + "] to Role [" + \r
- userRole.role + \r
- ']';\r
- }\r
- },\r
- new MayChange() {\r
- private Result<NsDAO.Data> nsd;\r
- @Override\r
- public Result<?> mayChange() {\r
- if(nsd==null) {\r
- RoleDAO.Data r = RoleDAO.Data.decode(userRole);\r
- nsd = ques.mayUser(trans, trans.user(), r, Access.write);\r
- }\r
- return nsd;\r
- }\r
- });\r
- Result<NsDAO.Data> nsr = ques.deriveNs(trans, userRole.role);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(nsr);\r
- }\r
-\r
- switch(fd.status) {\r
- case OK:\r
- Result<List<Identity>> rfc = func.createFuture(trans, fd.value, userRole.user+'|'+userRole.ns + '.' + userRole.rname, \r
- userRole.user, nsr.value, "C");\r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "UserRole [%s - %s.%s] is saved for future processing",\r
- userRole.user,\r
- userRole.ns,\r
- userRole.rname);\r
- } else { \r
- return Result.err(rfc);\r
- }\r
- case Status.ACC_Now:\r
- return func.addUserRole(trans, userRole);\r
- default:\r
- return Result.err(fd);\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- \r
- /**\r
- * getUserRolesByRole\r
- */\r
- @ApiDoc(\r
- method = GET,\r
- path = "/authz/userRoles/role/:role",\r
- params = {"role|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {404,406},\r
- text = { "List all Users that are attached to Role specified in :role",\r
- }\r
- )\r
- @Override\r
- public Result<USERROLES> getUserRolesByRole(AuthzTrans trans, String role) {\r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank("Role",role).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- Result<RoleDAO.Data> rrdd;\r
- rrdd = RoleDAO.Data.decode(trans,ques,role);\r
- if(rrdd.notOK()) {\r
- return Result.err(rrdd);\r
- }\r
- // May Requester see result?\r
- Result<NsDAO.Data> ns = ques.mayUser(trans,trans.user(), rrdd.value,Access.read);\r
- if (ns.notOK()) {\r
- return Result.err(ns);\r
- }\r
- \r
- // boolean filter = true; \r
- // if (ns.value.isAdmin(trans.user()) || ns.value.isResponsible(trans.user()))\r
- // filter = false;\r
- \r
- // Get list of roles per user, then add to Roles as we go\r
- HashSet<UserRoleDAO.Data> userSet = new HashSet<UserRoleDAO.Data>();\r
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readByRole(trans, role);\r
- if(rlurd.isOK()) {\r
- for(UserRoleDAO.Data data : rlurd.value) {\r
- userSet.add(data);\r
- }\r
- }\r
- \r
- @SuppressWarnings("unchecked")\r
- USERROLES users = (USERROLES) mapper.newInstance(API.USER_ROLES);\r
- // Checked for permission\r
- mapper.userRoles(trans, userSet, users);\r
- return Result.ok(users);\r
- }\r
- /**\r
- * getUserRolesByRole\r
- */\r
- @ApiDoc(\r
- method = GET,\r
- path = "/authz/userRoles/user/:user",\r
- params = {"role|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {404,406},\r
- text = { "List all UserRoles for :user",\r
- }\r
- )\r
- @Override\r
- public Result<USERROLES> getUserRolesByUser(AuthzTrans trans, String user) {\r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank("User",user).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- // Get list of roles per user, then add to Roles as we go\r
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readByUser(trans, user);\r
- if(rlurd.notOK()) { \r
- return Result.err(rlurd);\r
- }\r
- @SuppressWarnings("unchecked")\r
- USERROLES users = (USERROLES) mapper.newInstance(API.USER_ROLES);\r
- // Checked for permission\r
- mapper.userRoles(trans, rlurd.value, users);\r
- return Result.ok(users);\r
- }\r
-\r
- \r
- @ApiDoc( \r
- method = PUT, \r
- path = "/authz/userRole/user",\r
- params = {},\r
- expectedCode = 200,\r
- errorCodes = {403,404,406}, \r
- text = { "Set a User's roles to the roles specified in the UserRoleRequest object.",\r
- "WARNING: Roles supplied will be the ONLY roles attached to this user",\r
- "If no roles are supplied, user's roles are reset."\r
- }\r
- )\r
- @Override\r
- public Result<Void> resetRolesForUser(AuthzTrans trans, REQUEST rreq) {\r
- Result<UserRoleDAO.Data> rurdd = mapper.userRole(trans, rreq);\r
- final Validator v = new Validator();\r
- if(rurdd.notOKorIsEmpty()) {\r
- return Result.err(rurdd);\r
- }\r
- if (v.user(trans.org(), rurdd.value.user).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- Set<String> currRoles = new HashSet<String>();\r
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readByUser(trans, rurdd.value.user);\r
- if(rlurd.isOK()) {\r
- for(UserRoleDAO.Data data : rlurd.value) {\r
- currRoles.add(data.role);\r
- }\r
- }\r
- \r
- Result<Void> rv = null;\r
- String[] roles;\r
- if(rurdd.value.role==null) {\r
- roles = new String[0];\r
- } else {\r
- roles = rurdd.value.role.split(",");\r
- }\r
- \r
- for (String role : roles) { \r
- if (v.role(role).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, ques, role);\r
- if(rrdd.notOK()) {\r
- return Result.err(rrdd);\r
- }\r
- \r
- rurdd.value.role(rrdd.value);\r
- \r
- Result<NsDAO.Data> nsd = ques.mayUser(trans, trans.user(), rrdd.value,Access.write);\r
- if (nsd.notOK()) {\r
- return Result.err(nsd);\r
- }\r
- Result<NsDAO.Data> nsr = ques.deriveNs(trans, role);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(nsr); \r
- }\r
- \r
- if(currRoles.contains(role)) {\r
- currRoles.remove(role);\r
- } else {\r
- rv = func.addUserRole(trans, rurdd.value);\r
- if (rv.notOK()) {\r
- return rv;\r
- }\r
- }\r
- }\r
- \r
- for (String role : currRoles) {\r
- rurdd.value.role(trans,ques,role);\r
- rv = ques.userRoleDAO.delete(trans, rurdd.value, true);\r
- if(rv.notOK()) {\r
- trans.info().log(rurdd.value.user,"/",rurdd.value.role, "expected to be deleted, but does not exist");\r
- // return rv; // if it doesn't exist, don't error out\r
- }\r
-\r
- }\r
- \r
- return Result.ok(); \r
- \r
- }\r
- \r
- @ApiDoc( \r
- method = PUT, \r
- path = "/authz/userRole/role",\r
- params = {},\r
- expectedCode = 200,\r
- errorCodes = {403,404,406}, \r
- text = { "Set a Role's users to the users specified in the UserRoleRequest object.",\r
- "WARNING: Users supplied will be the ONLY users attached to this role",\r
- "If no users are supplied, role's users are reset."\r
- }\r
- )\r
- @Override\r
- public Result<Void> resetUsersForRole(AuthzTrans trans, REQUEST rreq) {\r
- Result<UserRoleDAO.Data> rurdd = mapper.userRole(trans, rreq);\r
- if(rurdd.notOKorIsEmpty()) {\r
- return Result.err(rurdd);\r
- }\r
- final Validator v = new Validator();\r
- if (v.user_role(rurdd.value).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- RoleDAO.Data rd = RoleDAO.Data.decode(rurdd.value);\r
-\r
- Result<NsDAO.Data> nsd = ques.mayUser(trans, trans.user(), rd, Access.write);\r
- if (nsd.notOK()) {\r
- return Result.err(nsd);\r
- }\r
-\r
- Result<NsDAO.Data> nsr = ques.deriveNs(trans, rurdd.value.role);\r
- if(nsr.notOKorIsEmpty()) {\r
- return Result.err(nsr); \r
- }\r
-\r
- Set<String> currUsers = new HashSet<String>();\r
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readByRole(trans, rurdd.value.role);\r
- if(rlurd.isOK()) { \r
- for(UserRoleDAO.Data data : rlurd.value) {\r
- currUsers.add(data.user);\r
- }\r
- }\r
- \r
- // found when connected remotely to DEVL, can't replicate locally\r
- // inconsistent errors with cmd: role user setTo [nothing]\r
- // deleteUserRole --> read --> get --> cacheIdx(?)\r
- // sometimes returns idx for last added user instead of user passed in\r
- // cache bug? \r
- \r
- \r
- Result<Void> rv = null;\r
- String[] users = {};\r
- if (rurdd.value.user != null) {\r
- users = rurdd.value.user.split(",");\r
- }\r
- \r
- for (String user : users) { \r
- if (v.user(trans.org(), user).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- rurdd.value.user = user;\r
-\r
- if(currUsers.contains(user)) {\r
- currUsers.remove(user);\r
- } else {\r
- rv = func.addUserRole(trans, rurdd.value);\r
- if (rv.notOK()) { \r
- return rv;\r
- }\r
- }\r
- }\r
- \r
- for (String user : currUsers) {\r
- rurdd.value.user = user; \r
- rv = ques.userRoleDAO.delete(trans, rurdd.value, true);\r
- if(rv.notOK()) {\r
- trans.info().log(rurdd.value, "expected to be deleted, but not exists");\r
- return rv;\r
- }\r
- } \r
- \r
- return Result.ok(); \r
- }\r
- \r
- @ApiDoc(\r
- method = GET,\r
- path = "/authz/userRole/extend/:user/:role",\r
- params = { "user|string|true",\r
- "role|string|true"\r
- },\r
- expectedCode = 200,\r
- errorCodes = {403,404,406},\r
- text = { "Extend the Expiration of this User Role by the amount set by Organization",\r
- "Requestor must be allowed to modify the role"\r
- }\r
- )\r
- @Override\r
- public Result<Void> extendUserRole(AuthzTrans trans, String user, String role) {\r
- Organization org = trans.org();\r
- final Validator v = new Validator();\r
- if(v.user(org, user)\r
- .role(role)\r
- .err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,ques,role);\r
- if(rrdd.notOK()) {\r
- return Result.err(rrdd);\r
- }\r
- \r
- Result<NsDAO.Data> rcr = ques.mayUser(trans, trans.user(), rrdd.value, Access.write);\r
- boolean mayNotChange;\r
- if((mayNotChange = rcr.notOK()) && !trans.futureRequested()) {\r
- return Result.err(rcr);\r
- }\r
- \r
- Result<List<UserRoleDAO.Data>> rr = ques.userRoleDAO.read(trans, user,role);\r
- if(rr.notOK()) {\r
- return Result.err(rr);\r
- }\r
- for(UserRoleDAO.Data userRole : rr.value) {\r
- if(mayNotChange) { // Function exited earlier if !trans.futureRequested\r
- FutureDAO.Data fto = new FutureDAO.Data();\r
- fto.target=UserRoleDAO.TABLE;\r
- fto.memo = "Extend User ["+userRole.user+"] in Role ["+userRole.role+"]";\r
- GregorianCalendar now = new GregorianCalendar();\r
- fto.start = now.getTime();\r
- fto.expires = org.expiration(now, Expiration.Future).getTime();\r
- try {\r
- fto.construct = userRole.bytify();\r
- } catch (IOException e) {\r
- trans.error().log(e, "Error while bytifying UserRole for Future");\r
- return Result.err(e);\r
- }\r
-\r
- Result<List<Identity>> rfc = func.createFuture(trans, fto, \r
- userRole.user+'|'+userRole.role, userRole.user, rcr.value, "U");\r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "UserRole [%s - %s] is saved for future processing",\r
- userRole.user,\r
- userRole.role);\r
- } else {\r
- return Result.err(rfc);\r
- }\r
- } else {\r
- return func.extendUserRole(trans, userRole, false);\r
- }\r
- }\r
- return Result.err(Result.ERR_NotFound,"This user and role doesn't exist");\r
- }\r
-\r
- @ApiDoc( \r
- method = DELETE, \r
- path = "/authz/userRole/:user/:role",\r
- params = { "user|string|true",\r
- "role|string|true"\r
- },\r
- expectedCode = 200,\r
- errorCodes = {403,404,406}, \r
- text = { "Remove Role :role from User :user."\r
- }\r
- )\r
- @Override\r
- public Result<Void> deleteUserRole(AuthzTrans trans, String usr, String role) {\r
- Validator val = new Validator();\r
- if(val.nullOrBlank("User", usr)\r
- .nullOrBlank("Role", role).err()) {\r
- return Result.err(Status.ERR_BadData, val.errs());\r
- }\r
-\r
- boolean mayNotChange;\r
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,ques,role);\r
- if(rrdd.notOK()) {\r
- return Result.err(rrdd);\r
- }\r
- \r
- RoleDAO.Data rdd = rrdd.value;\r
- // Make sure we don't delete the last owner\r
- if(Question.OWNER.equals(rdd.name) && ques.countOwner(trans, usr, rdd.ns)<=1) {\r
- return Result.err(Status.ERR_Denied,"You may not delete the last Owner of " + rdd.ns );\r
- }\r
- \r
- Result<NsDAO.Data> rns = ques.mayUser(trans, trans.user(), rdd, Access.write);\r
- if(mayNotChange=rns.notOK()) {\r
- if(!trans.futureRequested()) {\r
- return Result.err(rns);\r
- }\r
- }\r
-\r
- Result<List<UserRoleDAO.Data>> rulr;\r
- if((rulr=ques.userRoleDAO.read(trans, usr, role)).notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_UserRoleNotFound, "User [ "+usr+" ] is not "\r
- + "Assigned to the Role [ " + role + " ]");\r
- }\r
-\r
- UserRoleDAO.Data userRole = rulr.value.get(0);\r
- if(mayNotChange) { // Function exited earlier if !trans.futureRequested\r
- FutureDAO.Data fto = new FutureDAO.Data();\r
- fto.target=UserRoleDAO.TABLE;\r
- fto.memo = "Remove User ["+userRole.user+"] from Role ["+userRole.role+"]";\r
- GregorianCalendar now = new GregorianCalendar();\r
- fto.start = now.getTime();\r
- fto.expires = trans.org().expiration(now, Expiration.Future).getTime();\r
-\r
- Result<List<Identity>> rfc = func.createFuture(trans, fto, \r
- userRole.user+'|'+userRole.role, userRole.user, rns.value, "D");\r
- if(rfc.isOK()) {\r
- return Result.err(Status.ACC_Future, "UserRole [%s - %s] is saved for future processing", \r
- userRole.user,\r
- userRole.role);\r
- } else { \r
- return Result.err(rfc);\r
- }\r
- } else {\r
- return ques.userRoleDAO.delete(trans, rulr.value.get(0), false);\r
- }\r
- }\r
-\r
- @ApiDoc( \r
- method = GET, \r
- path = "/authz/userRole/:user/:role",\r
- params = {"user|string|true",\r
- "role|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {403,404,406}, \r
- text = { "Returns the User (with Expiration date from listed User/Role) if it exists"\r
- }\r
- )\r
- @Override\r
- public Result<USERS> getUserInRole(AuthzTrans trans, String user, String role) {\r
- final Validator v = new Validator();\r
- if(v.role(role).nullOrBlank("User", user).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
-// Result<NsDAO.Data> ns = ques.deriveNs(trans, role);\r
-// if (ns.notOK()) return Result.err(ns);\r
-// \r
-// Result<NsDAO.Data> rnd = ques.mayUser(trans, trans.user(), ns.value, Access.write);\r
- // May calling user see by virtue of the Role\r
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, ques, role);\r
- if(rrdd.notOK()) {\r
- return Result.err(rrdd);\r
- }\r
- Result<NsDAO.Data> rnd = ques.mayUser(trans, trans.user(), rrdd.value,Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd); \r
- }\r
- \r
- HashSet<UserRoleDAO.Data> userSet = new HashSet<UserRoleDAO.Data>();\r
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readUserInRole(trans, user, role);\r
- if(rlurd.isOK()) {\r
- for(UserRoleDAO.Data data : rlurd.value) {\r
- userSet.add(data);\r
- }\r
- }\r
- \r
- @SuppressWarnings("unchecked")\r
- USERS users = (USERS) mapper.newInstance(API.USERS);\r
- mapper.users(trans, userSet, users);\r
- return Result.ok(users);\r
- }\r
-\r
- @ApiDoc( \r
- method = GET, \r
- path = "/authz/users/role/:role",\r
- params = {"user|string|true",\r
- "role|string|true"},\r
- expectedCode = 200,\r
- errorCodes = {403,404,406}, \r
- text = { "Returns the User (with Expiration date from listed User/Role) if it exists"\r
- }\r
- )\r
- @Override\r
- public Result<USERS> getUsersByRole(AuthzTrans trans, String role) {\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("Role",role).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
-// Result<NsDAO.Data> ns = ques.deriveNs(trans, role);\r
-// if (ns.notOK()) return Result.err(ns);\r
-// \r
-// Result<NsDAO.Data> rnd = ques.mayUser(trans, trans.user(), ns.value, Access.write);\r
- // May calling user see by virtue of the Role\r
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, ques, role);\r
- if(rrdd.notOK()) {\r
- return Result.err(rrdd);\r
- }\r
- Result<NsDAO.Data> rnd = ques.mayUser(trans, trans.user(), rrdd.value,Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd); \r
- }\r
- \r
- HashSet<UserRoleDAO.Data> userSet = new HashSet<UserRoleDAO.Data>();\r
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readByRole(trans, role);\r
- if(rlurd.isOK()) { \r
- for(UserRoleDAO.Data data : rlurd.value) {\r
- userSet.add(data);\r
- }\r
- }\r
- \r
- @SuppressWarnings("unchecked")\r
- USERS users = (USERS) mapper.newInstance(API.USERS);\r
- mapper.users(trans, userSet, users);\r
- return Result.ok(users);\r
- }\r
-\r
- /**\r
- * getUsersByPermission\r
- */\r
- @ApiDoc(\r
- method = GET,\r
- path = "/authz/users/perm/:type/:instance/:action",\r
- params = { "type|string|true",\r
- "instance|string|true",\r
- "action|string|true"\r
- },\r
- expectedCode = 200,\r
- errorCodes = {404,406},\r
- text = { "List all Users that have Permission specified by :type :instance :action",\r
- }\r
- )\r
- @Override\r
- public Result<USERS> getUsersByPermission(AuthzTrans trans, String type, String instance, String action) {\r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank("Type",type)\r
- .nullOrBlank("Instance",instance)\r
- .nullOrBlank("Action",action) \r
- .err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- Result<NsSplit> nss = ques.deriveNsSplit(trans, type);\r
- if(nss.notOK()) {\r
- return Result.err(nss);\r
- }\r
- \r
- Result<List<NsDAO.Data>> nsd = ques.nsDAO.read(trans, nss.value.ns);\r
- if (nsd.notOK()) {\r
- return Result.err(nsd);\r
- }\r
- \r
- boolean allInstance = ASTERIX.equals(instance);\r
- boolean allAction = ASTERIX.equals(action);\r
- // Get list of roles per Permission, \r
- // Then loop through Roles to get Users\r
- // Note: Use Sets to avoid processing or responding with Duplicates\r
- Set<String> roleUsed = new HashSet<String>();\r
- Set<UserRoleDAO.Data> userSet = new HashSet<UserRoleDAO.Data>();\r
- \r
- if(!nss.isEmpty()) {\r
- Result<List<PermDAO.Data>> rlp = ques.permDAO.readByType(trans, nss.value.ns, nss.value.name);\r
- if(rlp.isOKhasData()) {\r
- for(PermDAO.Data pd : rlp.value) {\r
- if((allInstance || pd.instance.equals(instance)) && \r
- (allAction || pd.action.equals(action))) {\r
- if(ques.mayUser(trans, trans.user(),pd,Access.read).isOK()) {\r
- for(String role : pd.roles) {\r
- if(!roleUsed.contains(role)) { // avoid evaluating Role many times\r
- roleUsed.add(role);\r
- Result<List<UserRoleDAO.Data>> rlurd = ques.userRoleDAO.readByRole(trans, role.replace('|', '.'));\r
- if(rlurd.isOKhasData()) {\r
- for(UserRoleDAO.Data urd : rlurd.value) {\r
- userSet.add(urd);\r
- }\r
- }\r
- }\r
- }\r
- }\r
- }\r
- }\r
- }\r
- }\r
- @SuppressWarnings("unchecked")\r
- USERS users = (USERS) mapper.newInstance(API.USERS);\r
- mapper.users(trans, userSet, users);\r
- return Result.ok(users);\r
- }\r
-\r
- /***********************************\r
- * HISTORY \r
- ***********************************/ \r
- @Override\r
- public Result<HISTORY> getHistoryByUser(final AuthzTrans trans, String user, final int[] yyyymm, final int sort) { \r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank("User",user).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- Result<NsDAO.Data> rnd;\r
- // Users may look at their own data\r
- if(trans.user().equals(user)) {\r
- // Users may look at their own data\r
- } else {\r
- int at = user.indexOf('@');\r
- if(at>=0 && trans.org().getRealm().equals(user.substring(at+1))) {\r
- NsDAO.Data nsd = new NsDAO.Data();\r
- nsd.name = Question.domain2ns(user);\r
- rnd = ques.mayUser(trans, trans.user(), nsd, Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd);\r
- }\r
- } else {\r
- rnd = ques.validNSOfDomain(trans, user);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd);\r
- }\r
-\r
- rnd = ques.mayUser(trans, trans.user(), rnd.value, Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd);\r
- }\r
- }\r
- }\r
- Result<List<HistoryDAO.Data>> resp = ques.historyDAO.readByUser(trans, user, yyyymm);\r
- if(resp.notOK()) {\r
- return Result.err(resp);\r
- }\r
- return mapper.history(trans, resp.value,sort);\r
- }\r
-\r
- @Override\r
- public Result<HISTORY> getHistoryByRole(AuthzTrans trans, String role, int[] yyyymm, final int sort) {\r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank("Role",role).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, ques, role);\r
- if(rrdd.notOK()) {\r
- return Result.err(rrdd);\r
- }\r
- \r
- Result<NsDAO.Data> rnd = ques.mayUser(trans, trans.user(), rrdd.value, Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd);\r
- }\r
- Result<List<HistoryDAO.Data>> resp = ques.historyDAO.readBySubject(trans, role, "role", yyyymm); \r
- if(resp.notOK()) {\r
- return Result.err(resp);\r
- }\r
- return mapper.history(trans, resp.value,sort);\r
- }\r
-\r
- @Override\r
- public Result<HISTORY> getHistoryByPerm(AuthzTrans trans, String type, int[] yyyymm, final int sort) {\r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank("Type",type)\r
- .err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- // May user see Namespace of Permission (since it's only one piece... we can't check for "is permission part of")\r
- Result<NsDAO.Data> rnd = ques.deriveNs(trans,type);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd);\r
- }\r
- \r
- rnd = ques.mayUser(trans, trans.user(), rnd.value, Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd); \r
- }\r
- Result<List<HistoryDAO.Data>> resp = ques.historyDAO.readBySubject(trans, type, "perm", yyyymm);\r
- if(resp.notOK()) {\r
- return Result.err(resp);\r
- }\r
- return mapper.history(trans, resp.value,sort);\r
- }\r
-\r
- @Override\r
- public Result<HISTORY> getHistoryByNS(AuthzTrans trans, String ns, int[] yyyymm, final int sort) {\r
- final Validator v = new Validator(trans);\r
- if(v.nullOrBlank("NS",ns)\r
- .err()) { \r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- Result<NsDAO.Data> rnd = ques.deriveNs(trans,ns);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd);\r
- }\r
- rnd = ques.mayUser(trans, trans.user(), rnd.value, Access.read);\r
- if(rnd.notOK()) {\r
- return Result.err(rnd); \r
- }\r
-\r
- Result<List<HistoryDAO.Data>> resp = ques.historyDAO.readBySubject(trans, ns, "ns", yyyymm);\r
- if(resp.notOK()) {\r
- return Result.err(resp);\r
- }\r
- return mapper.history(trans, resp.value,sort);\r
- }\r
-\r
-/***********************************\r
- * DELEGATE \r
- ***********************************/\r
- @Override\r
- public Result<Void> createDelegate(final AuthzTrans trans, REQUEST base) {\r
- return createOrUpdateDelegate(trans, base, Question.Access.create);\r
- }\r
-\r
- @Override\r
- public Result<Void> updateDelegate(AuthzTrans trans, REQUEST base) {\r
- return createOrUpdateDelegate(trans, base, Question.Access.write);\r
- }\r
-\r
-\r
- private Result<Void> createOrUpdateDelegate(final AuthzTrans trans, REQUEST base, final Access access) {\r
- final Result<DelegateDAO.Data> rd = mapper.delegate(trans, base);\r
- final Validator v = new Validator();\r
- if(v.delegate(trans.org(),rd).err()) { \r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- final DelegateDAO.Data dd = rd.value;\r
- \r
- Result<List<DelegateDAO.Data>> ddr = ques.delegateDAO.read(trans, dd);\r
- if(access==Access.create && ddr.isOKhasData()) {\r
- return Result.err(Status.ERR_ConflictAlreadyExists, "[%s] already delegates to [%s]", dd.user, ddr.value.get(0).delegate);\r
- } else if(access!=Access.create && ddr.notOKorIsEmpty()) { \r
- return Result.err(Status.ERR_NotFound, "[%s] does not have a Delegate Record to [%s].",dd.user,access.name());\r
- }\r
- Result<Void> rv = ques.mayUser(trans, dd, access);\r
- if(rv.notOK()) {\r
- return rv;\r
- }\r
- \r
- Result<FutureDAO.Data> fd = mapper.future(trans,DelegateDAO.TABLE,base, dd, false, \r
- new Mapper.Memo() {\r
- @Override\r
- public String get() {\r
- StringBuilder sb = new StringBuilder();\r
- sb.append(access.name());\r
- sb.setCharAt(0, Character.toUpperCase(sb.charAt(0)));\r
- sb.append("Delegate ");\r
- sb.append(access==Access.create?"[":"to [");\r
- sb.append(rd.value.delegate);\r
- sb.append("] for [");\r
- sb.append(rd.value.user);\r
- sb.append(']');\r
- return sb.toString();\r
- }\r
- },\r
- new MayChange() {\r
- @Override\r
- public Result<?> mayChange() {\r
- return Result.ok(); // Validate in code above\r
- }\r
- });\r
- \r
- switch(fd.status) {\r
- case OK:\r
- Result<List<Identity>> rfc = func.createFuture(trans, fd.value, \r
- dd.user, trans.user(),null, access==Access.create?"C":"U");\r
- if(rfc.isOK()) { \r
- return Result.err(Status.ACC_Future, "Delegate for [%s]",\r
- dd.user);\r
- } else { \r
- return Result.err(rfc);\r
- }\r
- case Status.ACC_Now:\r
- if(access==Access.create) {\r
- Result<DelegateDAO.Data> rdr = ques.delegateDAO.create(trans, dd);\r
- if(rdr.isOK()) {\r
- return Result.ok();\r
- } else {\r
- return Result.err(rdr);\r
- }\r
- } else {\r
- return ques.delegateDAO.update(trans, dd);\r
- }\r
- default:\r
- return Result.err(fd);\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Void> deleteDelegate(AuthzTrans trans, REQUEST base) {\r
- final Result<DelegateDAO.Data> rd = mapper.delegate(trans, base);\r
- final Validator v = new Validator();\r
- if(v.notOK(rd).nullOrBlank("User", rd.value.user).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- Result<List<DelegateDAO.Data>> ddl;\r
- if((ddl=ques.delegateDAO.read(trans, rd.value)).notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_DelegateNotFound,"Cannot delete non-existent Delegate");\r
- }\r
- final DelegateDAO.Data dd = ddl.value.get(0);\r
- Result<Void> rv = ques.mayUser(trans, dd, Access.write);\r
- if(rv.notOK()) {\r
- return rv;\r
- }\r
- \r
- return ques.delegateDAO.delete(trans, dd, false);\r
- }\r
-\r
- @Override\r
- public Result<Void> deleteDelegate(AuthzTrans trans, String userName) {\r
- DelegateDAO.Data dd = new DelegateDAO.Data();\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("User", userName).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- dd.user = userName;\r
- Result<List<DelegateDAO.Data>> ddl;\r
- if((ddl=ques.delegateDAO.read(trans, dd)).notOKorIsEmpty()) {\r
- return Result.err(Status.ERR_DelegateNotFound,"Cannot delete non-existent Delegate");\r
- }\r
- dd = ddl.value.get(0);\r
- Result<Void> rv = ques.mayUser(trans, dd, Access.write);\r
- if(rv.notOK()) {\r
- return rv;\r
- }\r
- \r
- return ques.delegateDAO.delete(trans, dd, false);\r
- }\r
- \r
- @Override\r
- public Result<DELGS> getDelegatesByUser(AuthzTrans trans, String user) {\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("User", user).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- DelegateDAO.Data ddd = new DelegateDAO.Data();\r
- ddd.user = user;\r
- ddd.delegate = null;\r
- Result<Void> rv = ques.mayUser(trans, ddd, Access.read);\r
- if(rv.notOK()) {\r
- return Result.err(rv);\r
- }\r
- \r
- TimeTaken tt = trans.start("Get delegates for a user", Env.SUB);\r
-\r
- Result<List<DelegateDAO.Data>> dbDelgs = ques.delegateDAO.read(trans, user);\r
- try {\r
- if (dbDelgs.isOKhasData()) {\r
- return mapper.delegate(dbDelgs.value);\r
- } else {\r
- return Result.err(Status.ERR_DelegateNotFound,"No Delegate found for [%s]",user);\r
- }\r
- } finally {\r
- tt.done();\r
- } \r
- }\r
-\r
- @Override\r
- public Result<DELGS> getDelegatesByDelegate(AuthzTrans trans, String delegate) {\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("Delegate", delegate).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- DelegateDAO.Data ddd = new DelegateDAO.Data();\r
- ddd.user = delegate;\r
- Result<Void> rv = ques.mayUser(trans, ddd, Access.read);\r
- if(rv.notOK()) {\r
- return Result.err(rv);\r
- }\r
-\r
- TimeTaken tt = trans.start("Get users for a delegate", Env.SUB);\r
-\r
- Result<List<DelegateDAO.Data>> dbDelgs = ques.delegateDAO.readByDelegate(trans, delegate);\r
- try {\r
- if (dbDelgs.isOKhasData()) {\r
- return mapper.delegate(dbDelgs.value);\r
- } else {\r
- return Result.err(Status.ERR_DelegateNotFound,"Delegate [%s] is not delegating for anyone.",delegate);\r
- }\r
- } finally {\r
- tt.done();\r
- } \r
- }\r
-\r
-/***********************************\r
- * APPROVAL \r
- ***********************************/\r
- @Override\r
- public Result<Void> updateApproval(AuthzTrans trans, APPROVALS approvals) {\r
- Result<List<ApprovalDAO.Data>> rlad = mapper.approvals(approvals);\r
- if(rlad.notOK()) {\r
- return Result.err(rlad);\r
- }\r
- int numApprs = rlad.value.size();\r
- if(numApprs<1) {\r
- return Result.err(Status.ERR_NoApprovals,"No Approvals sent for Updating");\r
- }\r
- int numProcessed = 0;\r
- String user = trans.user();\r
- \r
- Result<List<ApprovalDAO.Data>> curr;\r
- for(ApprovalDAO.Data updt : rlad.value) {\r
- if(updt.ticket!=null) {\r
- curr = ques.approvalDAO.readByTicket(trans, updt.ticket);\r
- } else if(updt.id!=null) {\r
- curr = ques.approvalDAO.read(trans, updt);\r
- } else if(updt.approver!=null) {\r
- curr = ques.approvalDAO.readByApprover(trans, updt.approver);\r
- } else {\r
- return Result.err(Status.ERR_BadData,"Approvals need ID, Ticket or Approval data to update");\r
- }\r
- if(curr.isOKhasData()) {\r
- for(ApprovalDAO.Data cd : curr.value){\r
- // Check for right record. Need ID, or (Ticket&Trans.User==Appr)\r
- // If Default ID\r
- boolean delegatedAction = ques.isDelegated(trans, user, cd.approver);\r
- String delegator = cd.approver;\r
- if(updt.id!=null || \r
- (updt.ticket!=null && user.equals(cd.approver)) ||\r
- (updt.ticket!=null && delegatedAction)) {\r
- if(updt.ticket.equals(cd.ticket)) {\r
- cd.id = changed(updt.id,cd.id);\r
- cd.ticket = changed(updt.ticket,cd.ticket);\r
- cd.user = changed(updt.user,cd.user);\r
- cd.approver = changed(updt.approver,cd.approver);\r
- cd.type = changed(updt.type,cd.type);\r
- cd.status = changed(updt.status,cd.status);\r
- cd.memo = changed(updt.memo,cd.memo);\r
- cd.operation = changed(updt.operation,cd.operation);\r
- cd.updated = changed(updt.updated,cd.updated);\r
- ques.approvalDAO.update(trans, cd);\r
- Result<Void> rv = func.performFutureOp(trans, cd);\r
- if (rv.isOK()) {\r
- if (delegatedAction) {\r
- trans.audit().log("actor=",user,",action=",updt.status,",operation=\"",cd.memo,\r
- '"',",requestor=",cd.user,",delegator=",delegator);\r
- }\r
- if (!delegatedAction && cd.status.equalsIgnoreCase("denied")) {\r
- trans.audit().log("actor=",trans.user(),",action=denied,operation=\"",cd.memo,'"',",requestor=",cd.user);\r
- }\r
- rv = ques.approvalDAO.delete(trans, cd, false);\r
- }\r
- ++numProcessed;\r
-\r
- }\r
- }\r
- }\r
- }\r
- }\r
-\r
- if(numApprs==numProcessed) {\r
- return Result.ok();\r
- }\r
- return Result.err(Status.ERR_ActionNotCompleted,numProcessed + " out of " + numApprs + " completed");\r
-\r
- }\r
- \r
- private<T> T changed(T src, T dflt) {\r
- if(src!=null) {\r
- return src;\r
- }\r
- return dflt;\r
- }\r
-\r
- @Override\r
- public Result<APPROVALS> getApprovalsByUser(AuthzTrans trans, String user) {\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("User", user).err()) { \r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
-\r
- Result<List<ApprovalDAO.Data>> rapd = ques.approvalDAO.readByUser(trans, user);\r
- if(rapd.isOK()) {\r
- return mapper.approvals(rapd.value);\r
- } else {\r
- return Result.err(rapd);\r
- }\r
-}\r
-\r
- @Override\r
- public Result<APPROVALS> getApprovalsByTicket(AuthzTrans trans, String ticket) {\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("Ticket", ticket).err()) { \r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- UUID uuid;\r
- try {\r
- uuid = UUID.fromString(ticket);\r
- } catch (IllegalArgumentException e) {\r
- return Result.err(Status.ERR_BadData,e.getMessage());\r
- }\r
- \r
- Result<List<ApprovalDAO.Data>> rapd = ques.approvalDAO.readByTicket(trans, uuid);\r
- if(rapd.isOK()) {\r
- return mapper.approvals(rapd.value);\r
- } else {\r
- return Result.err(rapd);\r
- }\r
- }\r
- \r
- @Override\r
- public Result<APPROVALS> getApprovalsByApprover(AuthzTrans trans, String approver) {\r
- final Validator v = new Validator();\r
- if(v.nullOrBlank("Approver", approver).err()) {\r
- return Result.err(Status.ERR_BadData,v.errs());\r
- }\r
- \r
- List<ApprovalDAO.Data> listRapds = new ArrayList<ApprovalDAO.Data>();\r
- \r
- Result<List<ApprovalDAO.Data>> myRapd = ques.approvalDAO.readByApprover(trans, approver);\r
- if(myRapd.notOK()) {\r
- return Result.err(myRapd);\r
- }\r
- \r
- listRapds.addAll(myRapd.value);\r
- \r
- Result<List<DelegateDAO.Data>> delegatedFor = ques.delegateDAO.readByDelegate(trans, approver);\r
- if (delegatedFor.isOK()) {\r
- for (DelegateDAO.Data dd : delegatedFor.value) {\r
- if (dd.expires.after(new Date())) {\r
- String delegator = dd.user;\r
- Result<List<ApprovalDAO.Data>> rapd = ques.approvalDAO.readByApprover(trans, delegator);\r
- if (rapd.isOK()) {\r
- for (ApprovalDAO.Data d : rapd.value) { \r
- if (!d.user.equals(trans.user())) {\r
- listRapds.add(d);\r
- }\r
- }\r
- }\r
- }\r
- }\r
- }\r
- \r
- return mapper.approvals(listRapds);\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.AuthzService#clearCache(org.onap.aaf.authz.env.AuthzTrans, java.lang.String)\r
- */\r
- @Override\r
- public Result<Void> cacheClear(AuthzTrans trans, String cname) {\r
- if(ques.isGranted(trans,trans.user(),Define.ROOT_NS,CACHE,cname,"clear")) {\r
- return ques.clearCache(trans,cname);\r
- }\r
- return Result.err(Status.ERR_Denied, "%s does not have AAF Permission '%s.cache|%s|clear",\r
- trans.user(),Define.ROOT_NS,cname);\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.AuthzService#cacheClear(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, java.lang.Integer)\r
- */\r
- @Override\r
- public Result<Void> cacheClear(AuthzTrans trans, String cname, int[] segment) {\r
- if(ques.isGranted(trans,trans.user(),Define.ROOT_NS,CACHE,cname,"clear")) {\r
- Result<Void> v=null;\r
- for(int i: segment) {\r
- v=ques.cacheClear(trans,cname,i);\r
- }\r
- if(v!=null) {\r
- return v;\r
- }\r
- }\r
- return Result.err(Status.ERR_Denied, "%s does not have AAF Permission '%s.cache|%s|clear",\r
- trans.user(),Define.ROOT_NS,cname);\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.AuthzService#dbReset(org.onap.aaf.authz.env.AuthzTrans)\r
- */\r
- @Override\r
- public void dbReset(AuthzTrans trans) {\r
- ques.historyDAO.reportPerhapsReset(trans, null);\r
- }\r
-\r
-}\r
-\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service;\r
-\r
-import java.util.Date;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.mapper.Mapper;\r
-import org.onap.aaf.dao.DAOException;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-\r
-public interface AuthzService<NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> {\r
- public Mapper<NSS,PERMS,PERMKEY,ROLES,USERS,USERROLES,DELGS,CERTS,KEYS,REQUEST,HISTORY,ERR,APPROVALS> mapper();\r
- \r
-/***********************************\r
- * NAMESPACE \r
- ***********************************/\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @param ns\r
- * @return\r
- * @throws DAOException \r
- * @throws \r
- */\r
- public Result<Void> createNS(AuthzTrans trans, REQUEST request, NsType type);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @return\r
- */\r
- public Result<Void> addAdminNS(AuthzTrans trans, String ns, String id);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @return\r
- */\r
- public Result<Void> delAdminNS(AuthzTrans trans, String ns, String id);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param id\r
- * @return\r
- */\r
- public Result<Void> addResponsibleNS(AuthzTrans trans, String ns, String id);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param id\r
- * @return\r
- */\r
- public Result<Void> delResponsibleNS(AuthzTrans trans, String ns, String id);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param key\r
- * @param value\r
- * @return\r
- */\r
- public Result<Void> createNsAttrib(AuthzTrans trans, String ns, String key, String value);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param key\r
- * @param value\r
- * @return\r
- */\r
- public Result<?> updateNsAttrib(AuthzTrans trans, String ns, String key, String value);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param key\r
- * @return\r
- */\r
- public Result<Void> deleteNsAttrib(AuthzTrans trans, String ns, String key);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param key\r
- * @return\r
- */\r
- public Result<KEYS> readNsByAttrib(AuthzTrans trans, String key);\r
-\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @return\r
- */\r
- public Result<NSS> getNSbyName(AuthzTrans trans, String ns);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @return\r
- */\r
- public Result<NSS> getNSbyAdmin(AuthzTrans trans, String user, boolean full);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @return\r
- */\r
- public Result<NSS> getNSbyResponsible(AuthzTrans trans, String user, boolean full);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @return\r
- */\r
- public Result<NSS> getNSbyEither(AuthzTrans trans, String user, boolean full);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param parent\r
- * @return\r
- */\r
- public Result<NSS> getNSsChildren(AuthzTrans trans, String parent);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param req\r
- * @return\r
- */\r
- public Result<Void> updateNsDescription(AuthzTrans trans, REQUEST req);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @param user\r
- * @return\r
- * @throws DAOException\r
- */\r
- public Result<Void> deleteNS(AuthzTrans trans, String ns);\r
-\r
-/***********************************\r
- * PERM \r
- ***********************************/\r
- /**\r
- * \r
- * @param trans\r
- * @param rreq\r
- * @return\r
- * @throws DAOException \r
- * @throws MappingException\r
- */\r
- public Result<Void> createPerm(AuthzTrans trans, REQUEST rreq);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param childPerm\r
- * @return\r
- * @throws DAOException \r
- */\r
- public Result<PERMS> getPermsByType(AuthzTrans trans, String perm);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param type\r
- * @param instance\r
- * @param action\r
- * @return\r
- */\r
- public Result<PERMS> getPermsByName(AuthzTrans trans, String type,\r
- String instance, String action);\r
-\r
- /**\r
- * Gets all the permissions for a user across all the roles it is assigned to\r
- * @param userName\r
- * @return\r
- * @throws Exception \r
- * @throws Exception\r
- */\r
- public Result<PERMS> getPermsByUser(AuthzTrans trans, String userName);\r
-\r
- /**\r
- * Gets all the permissions for a user across all the roles it is assigned to\r
- * \r
- * Add AAF Perms representing the "MayUser" calls if\r
- * 1) Allowed\r
- * 2) User has equivalent permission\r
- * \r
- * @param userName\r
- * @return\r
- * @throws Exception \r
- * @throws Exception\r
- */\r
- public Result<PERMS> getPermsByUser(AuthzTrans trans, PERMS perms, String userName);\r
-\r
- /**\r
- * \r
- * Gets all the permissions for a user across all the roles it is assigned to\r
- * \r
- * @param roleName\r
- * @return\r
- * @throws Exception\r
- */\r
- public Result<PERMS> getPermsByRole(AuthzTrans trans, String roleName);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @return\r
- */\r
- public Result<PERMS> getPermsByNS(AuthzTrans trans, String ns);\r
-\r
- /**\r
- * rename permission\r
- * \r
- * @param trans\r
- * @param rreq\r
- * @param isRename\r
- * @param origType\r
- * @param origInstance\r
- * @param origAction\r
- * @return\r
- */\r
- public Result<Void> renamePerm(AuthzTrans trans, REQUEST rreq, String origType, String origInstance, String origAction);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param req\r
- * @return\r
- */\r
- public Result<Void> updatePermDescription(AuthzTrans trans, REQUEST req);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param from\r
- * @return\r
- */\r
- public Result<Void> resetPermRoles(AuthzTrans trans, REQUEST from);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param from\r
- * @return\r
- * @throws Exception\r
- */\r
- public Result<Void> deletePerm(AuthzTrans trans, REQUEST from);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @param perm\r
- * @param type\r
- * @param action\r
- * @return\r
- * @throws Exception\r
- */\r
- Result<Void> deletePerm(AuthzTrans trans, String perm, String type, String action);\r
-\r
-/***********************************\r
- * ROLE \r
- ***********************************/\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @param role\r
- * @param approvers\r
- * @return\r
- * @throws DAOException \r
- * @throws Exception\r
- */\r
- public Result<Void> createRole(AuthzTrans trans, REQUEST req);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param role\r
- * @return\r
- */\r
- public Result<ROLES> getRolesByName(AuthzTrans trans, String role);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @return\r
- * @throws DAOException \r
- */\r
- public Result<ROLES> getRolesByUser(AuthzTrans trans, String user);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @return\r
- */\r
- public Result<ROLES> getRolesByNS(AuthzTrans trans, String user);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param name\r
- * @return\r
- */\r
- public Result<ROLES> getRolesByNameOnly(AuthzTrans trans, String name);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param type\r
- * @param instance\r
- * @param action\r
- * @return\r
- */\r
- public Result<ROLES> getRolesByPerm(AuthzTrans trans, String type, String instance, String action);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param req\r
- * @return\r
- */\r
- public Result<Void> updateRoleDescription(AuthzTrans trans, REQUEST req);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param rreq\r
- * @return\r
- * @throws DAOException\r
- */\r
- public Result<Void> addPermToRole(AuthzTrans trans, REQUEST rreq);\r
- \r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param rreq\r
- * @return\r
- * @throws DAOException\r
- */\r
- Result<Void> delPermFromRole(AuthzTrans trans, REQUEST rreq);\r
-\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @param role\r
- * @return\r
- * @throws DAOException \r
- * @throws MappingException \r
- */\r
- public Result<Void> deleteRole(AuthzTrans trans, String role);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param req\r
- * @return\r
- */\r
- public Result<Void> deleteRole(AuthzTrans trans, REQUEST req);\r
-\r
-/***********************************\r
- * CRED \r
- ***********************************/\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param from\r
- * @return\r
- */\r
- Result<Void> createUserCred(AuthzTrans trans, REQUEST from);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param from\r
- * @return\r
- */\r
- Result<Void> changeUserCred(AuthzTrans trans, REQUEST from);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param from\r
- * @param days\r
- * @return\r
- */\r
- Result<Void> extendUserCred(AuthzTrans trans, REQUEST from, String days);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ns\r
- * @return\r
- */\r
- public Result<USERS> getCredsByNS(AuthzTrans trans, String ns);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param id\r
- * @return\r
- */\r
- public Result<USERS> getCredsByID(AuthzTrans trans, String id);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param req\r
- * @param id\r
- * @return\r
- */\r
- public Result<CERTS> getCertInfoByID(AuthzTrans trans, HttpServletRequest req, String id);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param credReq\r
- * @return\r
- */\r
- public Result<Void> deleteUserCred(AuthzTrans trans, REQUEST credReq);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @return\r
- * @throws Exception\r
- */\r
- public Result<Date> doesCredentialMatch(AuthzTrans trans, REQUEST credReq);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param basicAuth\r
- * @return\r
- */\r
- public Result<Date> validateBasicAuth(AuthzTrans trans, String basicAuth);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param role\r
- * @return\r
- */\r
- public Result<USERS> getUsersByRole(AuthzTrans trans, String role);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param role\r
- * @return\r
- */\r
- public Result<USERS> getUserInRole(AuthzTrans trans, String user, String role);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param type\r
- * @param instance\r
- * @param action\r
- * @return\r
- */\r
- public Result<USERS> getUsersByPermission(AuthzTrans trans,String type, String instance, String action);\r
- \r
- \r
-\r
-\r
-/***********************************\r
- * USER-ROLE \r
- ***********************************/\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @param request\r
- * @return\r
- * @throws Exception\r
- */\r
- public Result<Void> createUserRole(AuthzTrans trans, REQUEST request);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param role\r
- * @return\r
- */\r
- public Result<USERROLES> getUserRolesByRole(AuthzTrans trans, String role);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param role\r
- * @return\r
- */\r
- public Result<USERROLES> getUserRolesByUser(AuthzTrans trans, String user);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param from\r
- * @return\r
- */\r
- public Result<Void> resetRolesForUser(AuthzTrans trans, REQUEST from);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param from\r
- * @return\r
- */\r
- public Result<Void> resetUsersForRole(AuthzTrans trans, REQUEST from);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @param role\r
- * @return\r
- */\r
- public Result<Void> extendUserRole(AuthzTrans trans, String user,\r
- String role);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @param usr\r
- * @param role\r
- * @return\r
- * @throws DAOException \r
- */\r
- public Result<Void> deleteUserRole(AuthzTrans trans, String usr, String role);\r
-\r
-\r
-\r
-/***********************************\r
- * HISTORY \r
- ***********************************/ \r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @param yyyymm\r
- * @return\r
- */\r
- public Result<HISTORY> getHistoryByUser(AuthzTrans trans, String user, int[] yyyymm, int sort);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param subj\r
- * @param yyyymm\r
- * @param sort\r
- * @return\r
- */\r
- public Result<HISTORY> getHistoryByRole(AuthzTrans trans, String subj, int[] yyyymm, int sort);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param subj\r
- * @param yyyymm\r
- * @param sort\r
- * @return\r
- */\r
- public Result<HISTORY> getHistoryByPerm(AuthzTrans trans, String subj, int[] yyyymm, int sort);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param subj\r
- * @param yyyymm\r
- * @param sort\r
- * @return\r
- */\r
- public Result<HISTORY> getHistoryByNS(AuthzTrans trans, String subj, int[] yyyymm, int sort);\r
-\r
-/***********************************\r
- * DELEGATE \r
- ***********************************/\r
- /**\r
- * \r
- * @param trans\r
- * @param delegates\r
- * @return\r
- * @throws Exception\r
- */\r
- public Result<Void> createDelegate(AuthzTrans trans, REQUEST reqDelegate);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param delegates\r
- * @return\r
- * @throws Exception\r
- */\r
- public Result<Void> updateDelegate(AuthzTrans trans, REQUEST reqDelegate);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param userName\r
- * @param delegate\r
- * @return\r
- * @throws Exception\r
- */\r
- public Result<Void> deleteDelegate(AuthzTrans trans, REQUEST reqDelegate);\r
- \r
- /**\r
- * \r
- * @param trans\r
- * @param userName\r
- * @return\r
- */\r
- public Result<Void> deleteDelegate(AuthzTrans trans, String userName);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @return\r
- * @throws Exception\r
- */\r
- public Result<DELGS> getDelegatesByUser(AuthzTrans trans, String user);\r
- \r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param delegate\r
- * @return\r
- */\r
- public Result<DELGS> getDelegatesByDelegate(AuthzTrans trans, String delegate);\r
-\r
-/***********************************\r
- * APPROVAL \r
- ***********************************/\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @param approver\r
- * @param status\r
- * @return\r
- */\r
- public Result<Void> updateApproval(AuthzTrans trans, APPROVALS approvals);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param user\r
- * @return\r
- */\r
- public Result<APPROVALS> getApprovalsByUser(AuthzTrans trans, String user);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param ticket\r
- * @return\r
- */\r
- public Result<APPROVALS> getApprovalsByTicket(AuthzTrans trans, String ticket);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param approver\r
- * @return\r
- */\r
- public Result<APPROVALS> getApprovalsByApprover(AuthzTrans trans, String approver);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param cname\r
- * @return\r
- */\r
- public Result<Void> cacheClear(AuthzTrans trans, String cname);\r
-\r
- /**\r
- * \r
- * @param trans\r
- * @param cname\r
- * @param segment\r
- * @return\r
- */\r
- public Result<Void> cacheClear(AuthzTrans trans, String cname, int[] segment);\r
-\r
- /**\r
- * \r
- * @param trans\r
- */\r
- public void dbReset(AuthzTrans trans);\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.cssa.rserv.HttpCode;\r
-\r
-public abstract class Code extends HttpCode<AuthzTrans, AuthzFacade> implements Cloneable {\r
- public boolean useJSON;\r
-\r
- public Code(AuthzFacade facade, String description, boolean useJSON, String ... roles) {\r
- super(facade, description, roles);\r
- this.useJSON = useJSON;\r
- }\r
- \r
- public <D extends Code> D clone(AuthzFacade facade, boolean useJSON) throws Exception {\r
- @SuppressWarnings("unchecked")\r
- D d = (D)clone();\r
- d.useJSON = useJSON;\r
- d.context = facade;\r
- return d;\r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service;\r
-\r
-import org.onap.aaf.authz.layer.Result;\r
-\r
-/**\r
- * There are several ways to determine if \r
- *\r
- */\r
-public interface MayChange {\r
- public Result<?> mayChange();\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-import org.onap.aaf.cssa.rserv.HttpMethods;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-import org.onap.aaf.cadi.Symm;\r
-\r
-/**\r
- * API Apis\r
- *\r
- */\r
-public class API_Api {\r
- // Hide Public Constructor\r
- private API_Api() {}\r
- \r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param authzAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(final AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- ////////\r
- // Overall APIs\r
- ///////\r
- authzAPI.route(HttpMethods.GET,"/api",API.API,new Code(facade,"Document API", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getAPI(trans,resp,authzAPI);\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- ////////\r
- // Overall Examples\r
- ///////\r
- authzAPI.route(HttpMethods.GET,"/api/example/*",API.VOID,new Code(facade,"Document API", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- String pathInfo = req.getPathInfo();\r
- int question = pathInfo.lastIndexOf('?');\r
- \r
- pathInfo = pathInfo.substring(13, question<0?pathInfo.length():question);// IMPORTANT, this is size of "/api/example/"\r
- String nameOrContextType=Symm.base64noSplit.decode(pathInfo);\r
- Result<Void> r = context.getAPIExample(trans,resp,nameOrContextType,\r
- question>=0 && "optional=true".equalsIgnoreCase(req.getPathInfo().substring(question+1))\r
- );\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.PUT;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-\r
-public class API_Approval {\r
- // Hide Public Constructor\r
- private API_Approval() {}\r
- \r
- public static void init(AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
-\r
- /**\r
- * Get Approvals by User\r
- */\r
- authzAPI.route(GET, "/authz/approval/user/:user",API.APPROVALS,\r
- new Code(facade,"Get Approvals by User", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getApprovalsByUser(trans, resp, pathParam(req,"user"));\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200); \r
- } else {\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
-\r
- /**\r
- * Get Approvals by Ticket\r
- */\r
- authzAPI.route(GET, "/authz/approval/ticket/:ticket",API.VOID,new Code(facade,"Get Approvals by Ticket ", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getApprovalsByTicket(trans, resp, pathParam(req,"ticket"));\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
-\r
- /**\r
- * Get Approvals by Approver\r
- */\r
- authzAPI.route(GET, "/authz/approval/approver/:approver",API.APPROVALS,new Code(facade,"Get Approvals by Approver", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getApprovalsByApprover(trans, resp, pathParam(req,"approver"));\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
-\r
-\r
- /**\r
- * Update an approval\r
- */\r
- authzAPI.route(PUT, "/authz/approval",API.APPROVALS,new Code(facade,"Update approvals", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.updateApproval(trans, req, resp);\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.DELETE;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.POST;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.PUT;\r
-\r
-import java.security.Principal;\r
-import java.util.Date;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.cadi.DirectAAFUserPass;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-import org.onap.aaf.cssa.rserv.HttpMethods;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-import org.onap.aaf.cadi.CredVal;\r
-import org.onap.aaf.cadi.Symm;\r
-import org.onap.aaf.cadi.principal.BasicPrincipal;\r
-import org.onap.aaf.cadi.principal.X509Principal;\r
-import org.onap.aaf.inno.env.Env;\r
-\r
-/**\r
- * Initialize All Dispatches related to Credentials (AUTHN)\r
- *\r
- */\r
-public class API_Creds {\r
- // Hide Public Interface\r
- private API_Creds() {}\r
- // needed to validate Creds even when already Authenticated x509\r
- /**\r
- * TIME SENSITIVE APIs\r
- * \r
- * These will be first in the list\r
- * \r
- * @param env\r
- * @param authzAPI\r
- * @param facade\r
- * @param directAAFUserPass \r
- * @throws Exception\r
- */\r
- public static void timeSensitiveInit(Env env, AuthAPI authzAPI, AuthzFacade facade, final DirectAAFUserPass directAAFUserPass) throws Exception {\r
- /**\r
- * Basic Auth, quick Validation\r
- * \r
- * Responds OK or NotAuthorized\r
- */\r
- authzAPI.route(env, HttpMethods.GET, "/authn/basicAuth", new Code(facade,"Is given BasicAuth valid?",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
-\r
- Principal p = trans.getUserPrincipal();\r
- if (p instanceof BasicPrincipal) {\r
- // the idea is that if call is made with this credential, and it's a BasicPrincipal, it's ok\r
- // otherwise, it wouldn't have gotten here.\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else if (p instanceof X509Principal) {\r
- // have to check Basic Auth here, because it might be CSP.\r
- String ba = req.getHeader("Authorization");\r
- if(ba.startsWith("Basic ")) {\r
- String decoded = Symm.base64noSplit.decode(ba.substring(6));\r
- int colon = decoded.indexOf(':');\r
- if(directAAFUserPass.validate(\r
- decoded.substring(0,colon), \r
- CredVal.Type.PASSWORD , \r
- decoded.substring(colon+1).getBytes())) {\r
- \r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- resp.setStatus(HttpStatus.FORBIDDEN_403);\r
- }\r
- }\r
- } else if(p == null) {\r
- trans.error().log("Transaction not Authenticated... no Principal");\r
- resp.setStatus(HttpStatus.FORBIDDEN_403);\r
- } else {\r
- trans.checkpoint("Basic Auth Check Failed: This wasn't a Basic Auth Trans");\r
- // For Auth Security questions, we don't give any info to client on why failed\r
- resp.setStatus(HttpStatus.FORBIDDEN_403);\r
- }\r
- }\r
- },"text/plain");\r
- \r
- /** \r
- * returns whether a given Credential is valid\r
- */\r
- authzAPI.route(POST, "/authn/validate", API.CRED_REQ, new Code(facade,"Is given Credential valid?",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Date> r = context.doesCredentialMatch(trans, req, resp);\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- // For Security, we don't give any info out on why failed, other than forbidden\r
- resp.setStatus(HttpStatus.FORBIDDEN_403);\r
- }\r
- }\r
- }); \r
-\r
- /** \r
- * returns whether a given Credential is valid\r
- */\r
- authzAPI.route(GET, "/authn/cert/id/:id", API.CERTS, new Code(facade,"Get Cert Info by ID",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getCertInfoByID(trans, req, resp, pathParam(req,":id") );\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200); \r
- } else {\r
- // For Security, we don't give any info out on why failed, other than forbidden\r
- resp.setStatus(HttpStatus.FORBIDDEN_403);\r
- }\r
- }\r
- }); \r
-\r
-\r
-\r
-\r
- }\r
- \r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param authzAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- /**\r
- * Create a new ID/Credential\r
- */\r
- authzAPI.route(POST,"/authn/cred",API.CRED_REQ,new Code(facade,"Add a New ID/Credential", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.createUserCred(trans, req);\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.CREATED_201);\r
- } else {\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- /** \r
- * gets all credentials by Namespace\r
- */\r
- authzAPI.route(GET, "/authn/creds/ns/:ns", API.USERS, new Code(facade,"Get Creds for a Namespace",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getCredsByNS(trans, resp, pathParam(req, "ns"));\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200); \r
- } else {\r
- context.error(trans,resp,r);\r
- }\r
- }\r
-\r
- });\r
- \r
- /** \r
- * gets all credentials by ID\r
- */\r
- authzAPI.route(GET, "/authn/creds/id/:id", API.USERS, new Code(facade,"Get Creds by ID",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getCredsByID(trans, resp, pathParam(req, "id"));\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200); \r
- } else {\r
- context.error(trans,resp,r);\r
- }\r
- }\r
-\r
- });\r
-\r
-\r
- /**\r
- * Update ID/Credential (aka reset)\r
- */\r
- authzAPI.route(PUT,"/authn/cred",API.CRED_REQ,new Code(facade,"Update an ID/Credential", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.changeUserCred(trans, req);\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Extend ID/Credential\r
- * This behavior will accelerate getting out of P1 outages due to ignoring renewal requests, or\r
- * other expiration issues.\r
- * \r
- * Scenario is that people who are solving Password problems at night, are not necessarily those who\r
- * know what the passwords are supposed to be. Also, changing Password, without changing Configurations\r
- * using that password only exacerbates the P1 Issue.\r
- */\r
- authzAPI.route(PUT,"/authn/cred/:days",API.CRED_REQ,new Code(facade,"Extend an ID/Credential", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.extendUserCred(trans, req, pathParam(req, "days"));\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Delete a ID/Credential by Object\r
- */\r
- authzAPI.route(DELETE,"/authn/cred",API.CRED_REQ,new Code(facade,"Delete a Credential", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.deleteUserCred(trans, req);\r
- if(r.isOK()) {\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.DELETE;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.POST;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.PUT;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-\r
-public class API_Delegate {\r
- public static void init(AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- /**\r
- * Add a delegate\r
- */\r
- authzAPI.route(POST, "/authz/delegate",API.DELG_REQ,new Code(facade,"Add a Delegate", true) {\r
-\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.createDelegate(trans, req, resp);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.CREATED_201); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
- \r
- /**\r
- * Update a delegate\r
- */\r
- authzAPI.route(PUT, "/authz/delegate",API.DELG_REQ,new Code(facade,"Update a Delegate", true) {\r
-\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.updateDelegate(trans, req, resp);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
- \r
- /**\r
- * DELETE delegates for a user\r
- */\r
- authzAPI.route(DELETE, "/authz/delegate",API.DELG_REQ,new Code(facade,"Delete delegates for a user", true) {\r
-\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.deleteDelegate(trans, req, resp);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
- \r
- /**\r
- * DELETE a delegate\r
- */\r
- authzAPI.route(DELETE, "/authz/delegate/:user_name",API.VOID,new Code(facade,"Delete a Delegate", true) {\r
-\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.deleteDelegate(trans, pathParam(req, "user_name"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
- \r
- /**\r
- * Read who is delegating for User\r
- */\r
- authzAPI.route(GET, "/authz/delegates/user/:user",API.DELGS,new Code(facade,"Get Delegates by User", true) {\r
-\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getDelegatesByUser(trans, pathParam(req, "user"), resp);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
-\r
- /**\r
- * Read for whom the User is delegating\r
- */\r
- authzAPI.route(GET, "/authz/delegates/delegate/:delegate",API.DELGS,new Code(facade,"Get Delegates by Delegate", true) {\r
-\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getDelegatesByDelegate(trans, pathParam(req, "delegate"), resp);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- } \r
- } \r
- });\r
-\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-\r
-import java.text.SimpleDateFormat;\r
-import java.util.ArrayList;\r
-import java.util.Collections;\r
-import java.util.Date;\r
-import java.util.GregorianCalendar;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-\r
-/**\r
- * Pull certain types of History Info\r
- * \r
- * Specify yyyymm as \r
- * single - 201504\r
- * commas 201503,201504\r
- * ranges 201501-201504\r
- * combinations 201301,201401,201501-201504\r
- * \r
- *\r
- */\r
-public class API_History {\r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param authzAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- /**\r
- * Get History\r
- */\r
- authzAPI.route(GET,"/authz/hist/user/:user",API.HISTORY,new Code(facade,"Get History by User", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- int[] years;\r
- int descend;\r
- try {\r
- years = getYears(req);\r
- descend = decending(req);\r
- } catch(Exception e) {\r
- context.error(trans, resp, Result.err(Status.ERR_BadData, e.getMessage()));\r
- return;\r
- }\r
-\r
- Result<Void> r = context.getHistoryByUser(trans, resp, pathParam(req,":user"),years,descend);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Get History by NS\r
- */\r
- authzAPI.route(GET,"/authz/hist/ns/:ns",API.HISTORY,new Code(facade,"Get History by Namespace", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- int[] years;\r
- int descend;\r
- try {\r
- years = getYears(req);\r
- descend = decending(req);\r
- } catch(Exception e) {\r
- context.error(trans, resp, Result.err(Status.ERR_BadData, e.getMessage()));\r
- return;\r
- }\r
- \r
- Result<Void> r = context.getHistoryByNS(trans, resp, pathParam(req,":ns"),years,descend);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Get History by Role\r
- */\r
- authzAPI.route(GET,"/authz/hist/role/:role",API.HISTORY,new Code(facade,"Get History by Role", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- int[] years;\r
- int descend;\r
- try {\r
- years = getYears(req);\r
- descend = decending(req);\r
- } catch(Exception e) {\r
- context.error(trans, resp, Result.err(Status.ERR_BadData, e.getMessage()));\r
- return;\r
- }\r
-\r
- Result<Void> r = context.getHistoryByRole(trans, resp, pathParam(req,":role"),years,descend);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Get History by Perm Type\r
- */\r
- authzAPI.route(GET,"/authz/hist/perm/:type",API.HISTORY,new Code(facade,"Get History by Perm Type", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- int[] years;\r
- int descend;\r
- try {\r
- years = getYears(req);\r
- descend = decending(req);\r
- } catch(Exception e) {\r
- context.error(trans, resp, Result.err(Status.ERR_BadData, e.getMessage()));\r
- return;\r
- }\r
- \r
- Result<Void> r = context.getHistoryByPerm(trans, resp, pathParam(req,":type"),years,descend);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- }\r
-\r
- // Check if Ascending\r
- private static int decending(HttpServletRequest req) {\r
- if("true".equalsIgnoreCase(req.getParameter("desc")))return -1;\r
- if("true".equalsIgnoreCase(req.getParameter("asc")))return 1;\r
- return 0;\r
- }\r
- \r
- // Get Common "yyyymm" parameter, or none\r
- private static final SimpleDateFormat FMT = new SimpleDateFormat("yyyyMM");\r
- \r
- private static int[] getYears(HttpServletRequest req) throws NumberFormatException {\r
- String yyyymm = req.getParameter("yyyymm");\r
- ArrayList<Integer> ai= new ArrayList<Integer>();\r
- if(yyyymm==null) {\r
- GregorianCalendar gc = new GregorianCalendar();\r
- // three months is the default\r
- for(int i=0;i<3;++i) {\r
- ai.add(Integer.parseInt(FMT.format(gc.getTime())));\r
- gc.add(GregorianCalendar.MONTH, -1);\r
- }\r
- } else {\r
- for(String ym : yyyymm.split(",")) {\r
- String range[] = ym.split("\\s*-\\s*");\r
- switch(range.length) {\r
- case 0:\r
- break;\r
- case 1:\r
- if(!ym.endsWith("-")) {\r
- ai.add(getNum(ym));\r
- break;\r
- } else {\r
- range=new String[] {ym.substring(0, 6),FMT.format(new Date())};\r
- }\r
- default:\r
- GregorianCalendar gc = new GregorianCalendar();\r
- gc.set(GregorianCalendar.MONTH, Integer.parseInt(range[1].substring(4,6))-1);\r
- gc.set(GregorianCalendar.YEAR, Integer.parseInt(range[1].substring(0,4)));\r
- int end = getNum(FMT.format(gc.getTime())); \r
- \r
- gc.set(GregorianCalendar.MONTH, Integer.parseInt(range[0].substring(4,6))-1);\r
- gc.set(GregorianCalendar.YEAR, Integer.parseInt(range[0].substring(0,4)));\r
- for(int i=getNum(FMT.format(gc.getTime()));i<=end;gc.add(GregorianCalendar.MONTH, 1),i=getNum(FMT.format(gc.getTime()))) {\r
- ai.add(i);\r
- }\r
-\r
- }\r
- }\r
- }\r
- if(ai.size()==0) {\r
- throw new NumberFormatException(yyyymm + " is an invalid number or range");\r
- }\r
- Collections.sort(ai);\r
- int ym[] = new int[ai.size()];\r
- for(int i=0;i<ym.length;++i) {\r
- ym[i]=ai.get(i);\r
- }\r
- return ym;\r
- }\r
- \r
- private static int getNum(String n) {\r
- if(n==null || n.length()!=6) throw new NumberFormatException(n + " is not in YYYYMM format");\r
- return Integer.parseInt(n);\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.DELETE;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.POST;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.common.Define;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-import org.onap.aaf.dao.session.SessionFilter;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-import org.onap.aaf.cadi.taf.dos.DenialOfServiceTaf;\r
-import org.onap.aaf.inno.env.Trans;\r
-\r
-/**\r
- * User Role APIs\r
- *\r
- */\r
-public class API_Mgmt {\r
-\r
- private static final String SUCCESS = "SUCCESS";\r
-\r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param authzAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(final AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
-\r
- /**\r
- * Clear Cache Segment\r
- */\r
- authzAPI.route(DELETE,"/mgmt/cache/:area/:segments",API.VOID,new Code(facade,"Clear Cache by Segment", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.cacheClear(trans, pathParam(req,"area"), pathParam(req,"segments"));\r
- switch(r.status) {\r
- case OK:\r
- trans.checkpoint(SUCCESS,Trans.ALWAYS);\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Clear Cache\r
- */\r
- authzAPI.route(DELETE,"/mgmt/cache/:area",API.VOID,new Code(facade,"Clear Cache", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r;\r
- String area;\r
- r = context.cacheClear(trans, area=pathParam(req,"area"));\r
- switch(r.status) {\r
- case OK:\r
- trans.audit().log("Cache " + area + " has been cleared by "+trans.user());\r
- trans.checkpoint(SUCCESS,Trans.ALWAYS);\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Clear DB Sessions\r
- */\r
- authzAPI.route(DELETE,"/mgmt/dbsession",API.VOID,new Code(facade,"Clear DBSessions", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- try {\r
- if(req.isUserInRole(Define.ROOT_NS+".db|pool|clear")) {\r
- SessionFilter.clear();\r
- context.dbReset(trans);\r
-\r
- trans.audit().log("DB Sessions have been cleared by "+trans.user());\r
-\r
- trans.checkpoint(SUCCESS,Trans.ALWAYS);\r
- resp.setStatus(HttpStatus.OK_200);\r
- return;\r
- }\r
- context.error(trans,resp,Result.err(Result.ERR_Denied,"%s is not allowed to clear dbsessions",trans.user()));\r
- } catch(Exception e) {\r
- trans.error().log(e, "clearing dbsession");\r
- context.error(trans,resp,Result.err(e));\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Deny an IP \r
- */\r
- authzAPI.route(POST, "/mgmt/deny/ip/:ip", API.VOID, new Code(facade,"Deny IP",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- String ip = pathParam(req,":ip");\r
- if(req.isUserInRole(Define.ROOT_NS+".deny|"+Define.ROOT_COMPANY+"|ip")) {\r
- if(DenialOfServiceTaf.denyIP(ip)) {\r
- trans.audit().log(ip+" has been set to deny by "+trans.user());\r
- trans.checkpoint(SUCCESS,Trans.ALWAYS);\r
-\r
- resp.setStatus(HttpStatus.CREATED_201);\r
- } else {\r
- context.error(trans,resp,Result.err(Status.ERR_ConflictAlreadyExists, \r
- ip + " is already being denied"));\r
- }\r
- } else {\r
- trans.audit().log(trans.user(),"has attempted to deny",ip,"without authorization");\r
- context.error(trans,resp,Result.err(Status.ERR_Denied, \r
- trans.getUserPrincipal().getName() + " is not allowed to set IP Denial"));\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Stop Denying an IP\r
- */\r
- authzAPI.route(DELETE, "/mgmt/deny/ip/:ip", API.VOID, new Code(facade,"Stop Denying IP",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- String ip = pathParam(req,":ip");\r
- if(req.isUserInRole(Define.ROOT_NS+".deny|"+Define.ROOT_COMPANY+"|ip")) {\r
- if(DenialOfServiceTaf.removeDenyIP(ip)) {\r
- trans.audit().log(ip+" has been removed from denial by "+trans.user());\r
- trans.checkpoint(SUCCESS,Trans.ALWAYS);\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,Result.err(Status.ERR_NotFound, \r
- ip + " is not on the denial list"));\r
- }\r
- } else {\r
- trans.audit().log(trans.user(),"has attempted to remove",ip," from being denied without authorization");\r
- context.error(trans,resp,Result.err(Status.ERR_Denied, \r
- trans.getUserPrincipal().getName() + " is not allowed to remove IP Denial"));\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Deny an ID \r
- */\r
- authzAPI.route(POST, "/mgmt/deny/id/:id", API.VOID, new Code(facade,"Deny ID",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- String id = pathParam(req,":id");\r
- if(req.isUserInRole(Define.ROOT_NS+".deny|"+Define.ROOT_COMPANY+"|id")) {\r
- if(DenialOfServiceTaf.denyID(id)) {\r
- trans.audit().log(id+" has been set to deny by "+trans.user());\r
- trans.checkpoint(SUCCESS,Trans.ALWAYS);\r
- resp.setStatus(HttpStatus.CREATED_201);\r
- } else {\r
- context.error(trans,resp,Result.err(Status.ERR_ConflictAlreadyExists, \r
- id + " is already being denied"));\r
- }\r
- } else {\r
- trans.audit().log(trans.user(),"has attempted to deny",id,"without authorization");\r
- context.error(trans,resp,Result.err(Status.ERR_Denied, \r
- trans.getUserPrincipal().getName() + " is not allowed to set ID Denial"));\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Stop Denying an ID\r
- */\r
- authzAPI.route(DELETE, "/mgmt/deny/id/:id", API.VOID, new Code(facade,"Stop Denying ID",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- String id = pathParam(req,":id");\r
- if(req.isUserInRole(Define.ROOT_NS+".deny|"+Define.ROOT_COMPANY+"|id")) {\r
- if(DenialOfServiceTaf.removeDenyID(id)) {\r
- trans.audit().log(id+" has been removed from denial by " + trans.user());\r
- trans.checkpoint(SUCCESS,Trans.ALWAYS);\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,Result.err(Status.ERR_NotFound, \r
- id + " is not on the denial list"));\r
- }\r
- } else {\r
- trans.audit().log(trans.user(),"has attempted to remove",id," from being denied without authorization");\r
- context.error(trans,resp,Result.err(Status.ERR_Denied, \r
- trans.getUserPrincipal().getName() + " is not allowed to remove ID Denial"));\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Deny an ID \r
- */\r
- authzAPI.route(POST, "/mgmt/log/id/:id", API.VOID, new Code(facade,"Special Log ID",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- String id = pathParam(req,":id");\r
- if(req.isUserInRole(Define.ROOT_NS+".log|"+Define.ROOT_COMPANY+"|id")) {\r
- if(Question.specialLogOn(trans,id)) {\r
- trans.audit().log(id+" has been set to special Log by "+trans.user());\r
- trans.checkpoint(SUCCESS,Trans.ALWAYS);\r
- resp.setStatus(HttpStatus.CREATED_201);\r
- } else {\r
- context.error(trans,resp,Result.err(Status.ERR_ConflictAlreadyExists, \r
- id + " is already being special Logged"));\r
- }\r
- } else {\r
- trans.audit().log(trans.user(),"has attempted to special Log",id,"without authorization");\r
- context.error(trans,resp,Result.err(Status.ERR_Denied, \r
- trans.getUserPrincipal().getName() + " is not allowed to set ID special Logging"));\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Stop Denying an ID\r
- */\r
- authzAPI.route(DELETE, "/mgmt/log/id/:id", API.VOID, new Code(facade,"Stop Special Log ID",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- String id = pathParam(req,":id");\r
- if(req.isUserInRole(Define.ROOT_NS+".log|"+Define.ROOT_COMPANY+"|id")) {\r
- if(Question.specialLogOff(trans,id)) {\r
- trans.audit().log(id+" has been removed from special Logging by " + trans.user());\r
- trans.checkpoint(SUCCESS,Trans.ALWAYS);\r
- resp.setStatus(HttpStatus.OK_200);\r
- } else {\r
- context.error(trans,resp,Result.err(Status.ERR_NotFound, \r
- id + " is not on the special Logging list"));\r
- }\r
- } else {\r
- trans.audit().log(trans.user(),"has attempted to remove",id," from being special Logged without authorization");\r
- context.error(trans,resp,Result.err(Status.ERR_Denied, \r
- trans.getUserPrincipal().getName() + " is not allowed to remove ID special Logging"));\r
- }\r
- }\r
- });\r
-\r
-\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.DELETE;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.POST;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.PUT;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-\r
-public class API_NS {\r
- private static final String FULL = "full";\r
- private static final String TRUE = "true";\r
-\r
- public static void init(AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- /**\r
- * puts a new Namespace in Authz DB\r
- * \r
- * TESTCASES: TC_NS1, TC_NSdelete1\r
- */\r
- authzAPI.route(POST,"/authz/ns",API.NS_REQ, new Code(facade,"Create a Namespace",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- NsType nst = NsType.fromString(req.getParameter("type"));\r
- Result<Void> r = context.requestNS(trans, req, resp,nst);\r
- \r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.CREATED_201); \r
- break;\r
- case Status.ACC_Future:\r
- resp.setStatus(HttpStatus.ACCEPTED_202); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
- \r
- /**\r
- * removes a Namespace from Authz DB\r
- * \r
- * TESTCASES: TC_NS1, TC_NSdelete1\r
- */\r
- authzAPI.route(DELETE,"/authz/ns/:ns",API.VOID, new Code(facade,"Delete a Namespace",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.deleteNS(trans, req, resp, pathParam(req,":ns"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- /**\r
- * Add an Admin in NS in Authz DB\r
- * \r
- * TESTCASES: TC_NS1\r
- */\r
- authzAPI.route(POST,"/authz/ns/:ns/admin/:id",API.VOID, new Code(facade,"Add an Admin to a Namespace",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.addAdminToNS(trans, resp, pathParam(req,":ns"), pathParam(req,":id"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.CREATED_201); \r
- break;\r
- case Status.ACC_Future:\r
- resp.setStatus(HttpStatus.ACCEPTED_202); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
- \r
- /**\r
- * Removes an Admin from Namespace in Authz DB\r
- * \r
- * TESTCASES: TC_NS1\r
- */\r
- authzAPI.route(DELETE,"/authz/ns/:ns/admin/:id",API.VOID, new Code(facade,"Remove an Admin from a Namespace",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.delAdminFromNS(trans, resp, pathParam(req,":ns"), pathParam(req,":id"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- /**\r
- * Add an Admin in NS in Authz DB\r
- * \r
- * TESTCASES: TC_NS1\r
- */\r
- authzAPI.route(POST,"/authz/ns/:ns/responsible/:id",API.VOID, new Code(facade,"Add a Responsible Identity to a Namespace",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.addResponsibilityForNS(trans, resp, pathParam(req,":ns"), pathParam(req,":id"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.CREATED_201); \r
- break;\r
- case Status.ACC_Future:\r
- resp.setStatus(HttpStatus.ACCEPTED_202); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
-\r
- /**\r
- * \r
- */\r
- authzAPI.route(GET,"/authz/nss/:id",API.NSS, new Code(facade,"Return Information about Namespaces", true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getNSsByName(trans, resp, pathParam(req,":id"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- ); \r
- \r
- /**\r
- * Get all Namespaces where user is an admin\r
- */\r
- authzAPI.route(GET,"/authz/nss/admin/:user",API.NSS, new Code(facade,"Return Namespaces where User is an Admin", true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getNSsByAdmin(trans, resp, pathParam(req,":user"),TRUE.equals(req.getParameter(FULL)));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
- \r
- /**\r
- * Get all Namespaces where user is a responsible party\r
- */\r
- authzAPI.route(GET,"/authz/nss/responsible/:user",API.NSS, new Code(facade,"Return Namespaces where User is Responsible", true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getNSsByResponsible(trans, resp, pathParam(req,":user"),TRUE.equals(req.getParameter(FULL)));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- /**\r
- * Get all Namespaces where user is an admin or owner\r
- */\r
- authzAPI.route(GET,"/authz/nss/either/:user",API.NSS, new Code(facade,"Return Namespaces where User Admin or Owner", true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getNSsByEither(trans, resp, pathParam(req,":user"),TRUE.equals(req.getParameter(FULL)));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- /**\r
- * Get all children Namespaces\r
- */\r
- authzAPI.route(GET,"/authz/nss/children/:id",API.NSS, new Code(facade,"Return Child Namespaces", true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getNSsChildren(trans, resp, pathParam(req,":id"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- /**\r
- * Set a description of a Namespace\r
- */\r
- authzAPI.route(PUT,"/authz/ns",API.NS_REQ,new Code(facade,"Set a Description for a Namespace",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.updateNsDescription(trans, req, resp);\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }); \r
- \r
- /**\r
- * Removes an Owner from Namespace in Authz DB\r
- * \r
- * TESTCASES: TC_NS1\r
- */\r
- authzAPI.route(DELETE,"/authz/ns/:ns/responsible/:id",API.VOID, new Code(facade,"Remove a Responsible Identity from Namespace",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.delResponsibilityForNS(trans, resp, pathParam(req,":ns"), pathParam(req,":id"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- authzAPI.route(POST,"/authz/ns/:ns/attrib/:key/:value",API.VOID, new Code(facade,"Add an Attribute from a Namespace",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.createAttribForNS(trans, resp, \r
- pathParam(req,":ns"), \r
- pathParam(req,":key"),\r
- pathParam(req,":value"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.CREATED_201); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- authzAPI.route(GET,"/authz/ns/attrib/:key",API.KEYS, new Code(facade,"get Ns Key List From Attribute",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.readNsByAttrib(trans, resp, pathParam(req,":key"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- authzAPI.route(PUT,"/authz/ns/:ns/attrib/:key/:value",API.VOID, new Code(facade,"update an Attribute from a Namespace",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.updAttribForNS(trans, resp, \r
- pathParam(req,":ns"), \r
- pathParam(req,":key"),\r
- pathParam(req,":value"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
- \r
- authzAPI.route(DELETE,"/authz/ns/:ns/attrib/:key",API.VOID, new Code(facade,"delete an Attribute from a Namespace",true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.delAttribForNS(trans, resp, \r
- pathParam(req,":ns"), \r
- pathParam(req,":key"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- }\r
- \r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.DELETE;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.POST;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.PUT;\r
-\r
-import java.net.URLDecoder;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-import org.onap.aaf.cadi.config.Config;\r
-\r
-public class API_Perms {\r
- public static void timeSensitiveInit(AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- /** \r
- * gets all permissions by user name\r
- */\r
- authzAPI.route(GET, "/authz/perms/user/:user", API.PERMS, new Code(facade,"Get Permissions by User",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getPermsByUser(trans, resp, pathParam(req, "user"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
-\r
- });\r
- \r
- /** \r
- * gets all permissions by user name\r
- */\r
- authzAPI.route(POST, "/authz/perms/user/:user", API.PERMS, new Code(facade,"Get Permissions by User, Query AAF Perms",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getPermsByUserWithAAFQuery(trans, req, resp, pathParam(req, "user"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
-\r
- });\r
-\r
-\r
- } // end timeSensitiveInit\r
-\r
- public static void init(AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- /**\r
- * Create a Permission\r
- */\r
- authzAPI.route(POST,"/authz/perm",API.PERM_REQ,new Code(facade,"Create a Permission",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.createPerm(trans, req, resp);\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.CREATED_201); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /** \r
- * get details of Permission\r
- */\r
- authzAPI.route(GET, "/authz/perms/:type/:instance/:action", API.PERMS, new Code(facade,"Get Permissions by Key",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getPermsByName(trans, resp, \r
- pathParam(req, "type"),\r
- URLDecoder.decode(pathParam(req, "instance"),Config.UTF_8),\r
- pathParam(req, "action"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
-\r
- });\r
- \r
- /** \r
- * get children of Permission\r
- */\r
- authzAPI.route(GET, "/authz/perms/:type", API.PERMS, new Code(facade,"Get Permissions by Type",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getPermsByType(trans, resp, pathParam(req, "type"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
-\r
- });\r
-\r
- \r
- /**\r
- * gets all permissions by role name\r
- */\r
- authzAPI.route(GET,"/authz/perms/role/:role",API.PERMS,new Code(facade,"Get Permissions by Role",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getPermsForRole(trans, resp, pathParam(req, "role"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * gets all permissions by Namespace\r
- */\r
- authzAPI.route(GET,"/authz/perms/ns/:ns",API.PERMS,new Code(facade,"Get PermsByNS",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getPermsByNS(trans, resp, pathParam(req, "ns"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Set a perm's description\r
- */\r
- authzAPI.route(PUT,"/authz/perm",API.PERM_REQ,new Code(facade,"Set Description for Permission",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.updatePermDescription(trans, req, resp);\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }); \r
- \r
- /**\r
- * Update a permission with a rename\r
- */\r
- authzAPI.route(PUT,"/authz/perm/:type/:instance/:action",API.PERM_REQ,new Code(facade,"Update a Permission",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.renamePerm(trans, req, resp, pathParam(req, "type"), \r
- pathParam(req, "instance"), pathParam(req, "action"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }); \r
- \r
- /**\r
- * Delete a Permission\r
- */\r
- authzAPI.route(DELETE,"/authz/perm",API.PERM_REQ,new Code(facade,"Delete a Permission",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.deletePerm(trans,req, resp);\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- \r
- \r
-\r
- /**\r
- * Delete a Permission\r
- */\r
- authzAPI.route(DELETE,"/authz/perm/:name/:type/:action",API.PERM_KEY,new Code(facade,"Delete a Permission",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.deletePerm(trans, resp,\r
- pathParam(req, ":name"),\r
- pathParam(req, ":type"),\r
- pathParam(req, ":action"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- } // end init\r
-}\r
-\r
-\r
-\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.DELETE;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.POST;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.PUT;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-\r
-public class API_Roles {\r
- public static void init(AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- /**\r
- * puts a new role in Authz DB\r
- */\r
- authzAPI.route(POST,"/authz/role",API.ROLE_REQ, new Code(facade,"Create Role",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.createRole(trans, req, resp);\r
- \r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.CREATED_201); \r
- break;\r
- case Status.ACC_Future:\r
- resp.setStatus(HttpStatus.ACCEPTED_202); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
-\r
- /** \r
- * get Role by name\r
- */\r
- authzAPI.route(GET, "/authz/roles/:role", API.ROLES, new Code(facade,"GetRolesByFullName",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getRolesByName(trans, resp, pathParam(req, "role"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
-\r
- });\r
-\r
-\r
- /** \r
- * gets all Roles by user name\r
- */\r
- authzAPI.route(GET, "/authz/roles/user/:name", API.ROLES, new Code(facade,"GetRolesByUser",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getRolesByUser(trans, resp, pathParam(req, "name"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
-\r
- });\r
-\r
- /** \r
- * gets all Roles by Namespace\r
- */\r
- authzAPI.route(GET, "/authz/roles/ns/:ns", API.ROLES, new Code(facade,"GetRolesByNS",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getRolesByNS(trans, resp, pathParam(req, "ns"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /** \r
- * gets all Roles by Name without the Namespace\r
- */\r
- authzAPI.route(GET, "/authz/roles/name/:name", API.ROLES, new Code(facade,"GetRolesByNameOnly",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getRolesByNameOnly(trans, resp, pathParam(req, ":name"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Deletes a Role from Authz DB by Object\r
- */\r
- authzAPI.route(DELETE,"/authz/role",API.ROLE_REQ, new Code(facade,"Delete Role",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.deleteRole(trans, req, resp);\r
- \r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- \r
- }\r
- );\r
- \r
-\r
- \r
- /**\r
- * Deletes a Role from Authz DB by Key\r
- */\r
- authzAPI.route(DELETE,"/authz/role/:role",API.ROLE, new Code(facade,"Delete Role",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.deleteRole(trans, resp, pathParam(req,":role"));\r
- \r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- \r
- }\r
- );\r
- \r
-\r
- /**\r
- * Add a Permission to a Role (Grant)\r
- */\r
- authzAPI.route(POST,"/authz/role/perm",API.ROLE_PERM_REQ, new Code(facade,"Add Permission to Role",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.addPermToRole(trans, req, resp);\r
- \r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.CREATED_201); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }\r
- );\r
- \r
- /**\r
- * Get all Roles by Permission\r
- */\r
- authzAPI.route(GET,"/authz/roles/perm/:type/:instance/:action",API.ROLES,new Code(facade,"GetRolesByPerm",true) {\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.getRolesByPerm(trans, resp, \r
- pathParam(req, "type"),\r
- pathParam(req, "instance"),\r
- pathParam(req, "action"));\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Set a role's description\r
- */\r
- authzAPI.route(PUT,"/authz/role",API.ROLE_REQ,new Code(facade,"Set Description for role",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.updateRoleDescription(trans, req, resp);\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }); \r
- \r
- /**\r
- * Set a permission's roles to roles given\r
- */\r
- authzAPI.route(PUT,"/authz/role/perm",API.ROLE_PERM_REQ,new Code(facade,"Set a Permission's Roles",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans, \r
- HttpServletRequest req,\r
- HttpServletResponse resp) throws Exception {\r
- \r
- Result<Void> r = context.resetPermRoles(trans, req, resp);\r
- switch(r.status) {\r
- case OK: \r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- }); \r
- \r
- /**\r
- * Delete a Permission from a Role\r
- */\r
- authzAPI.route(DELETE,"/authz/role/:role/perm",API.ROLE_PERM_REQ, new Code(facade,"Delete Permission from Role",true) {\r
- @Override\r
- public void handle(\r
- AuthzTrans trans,\r
- HttpServletRequest req, \r
- HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.delPermFromRole(trans, req, resp);\r
- \r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- \r
- }\r
- );\r
-\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-\r
-/**\r
- * User Role APIs\r
- *\r
- */\r
-public class API_User {\r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param authzAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(final AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- /**\r
- * get all Users who have Permission X\r
- */\r
- authzAPI.route(GET,"/authz/users/perm/:type/:instance/:action",API.USERS,new Code(facade,"Get Users By Permission", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
-// trans.checkpoint(pathParam(req,"type") + " " \r
-// + pathParam(req,"instance") + " " \r
-// + pathParam(req,"action"));\r
-//\r
- Result<Void> r = context.getUsersByPermission(trans, resp,\r
- pathParam(req, ":type"),\r
- pathParam(req, ":instance"),\r
- pathParam(req, ":action"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
-\r
- /**\r
- * get all Users who have Role X\r
- */\r
- authzAPI.route(GET,"/authz/users/role/:role",API.USERS,new Code(facade,"Get Users By Role", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getUsersByRole(trans, resp, pathParam(req, ":role"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Get User Role if exists\r
- * @deprecated\r
- */\r
- authzAPI.route(GET,"/authz/userRole/:user/:role",API.USERS,new Code(facade,"Get if User is In Role", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getUserInRole(trans, resp, pathParam(req,":user"),pathParam(req,":role"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- /**\r
- * Get User Role if exists\r
- */\r
- authzAPI.route(GET,"/authz/users/:user/:role",API.USERS,new Code(facade,"Get if User is In Role", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getUserInRole(trans, resp, pathParam(req,":user"),pathParam(req,":role"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
-\r
-\r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.onap.aaf.authz.layer.Result.OK;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.DELETE;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.GET;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.POST;\r
-import static org.onap.aaf.cssa.rserv.HttpMethods.PUT;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-import javax.servlet.http.HttpServletResponse;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.Code;\r
-import org.onap.aaf.authz.service.mapper.Mapper.API;\r
-\r
-import com.att.aft.dme2.internal.jetty.http.HttpStatus;\r
-\r
-/**\r
- * User Role APIs\r
- *\r
- */\r
-public class API_UserRole {\r
- /**\r
- * Normal Init level APIs\r
- * \r
- * @param authzAPI\r
- * @param facade\r
- * @throws Exception\r
- */\r
- public static void init(final AuthAPI authzAPI, AuthzFacade facade) throws Exception {\r
- /**\r
- * Request User Role Access\r
- */\r
- authzAPI.route(POST,"/authz/userRole",API.USER_ROLE_REQ,new Code(facade,"Request User Role Access", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.requestUserRole(trans, req, resp);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.CREATED_201); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- \r
- /**\r
- * Get UserRoles by Role\r
- */\r
- authzAPI.route(GET,"/authz/userRoles/role/:role",API.USER_ROLES,new Code(facade,"Get UserRoles by Role", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getUserRolesByRole(trans, resp, pathParam(req,":role"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Get UserRoles by User\r
- */\r
- authzAPI.route(GET,"/authz/userRoles/user/:user",API.USER_ROLES,new Code(facade,"Get UserRoles by User", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.getUserRolesByUser(trans, resp, pathParam(req,":user"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- \r
- /**\r
- * Update roles attached to user in path\r
- */\r
- authzAPI.route(PUT,"/authz/userRole/user",API.USER_ROLE_REQ,new Code(facade,"Update Roles for a user", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.resetRolesForUser(trans, resp, req);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- \r
- /**\r
- * Update users attached to role in path\r
- */\r
- authzAPI.route(PUT,"/authz/userRole/role",API.USER_ROLE_REQ,new Code(facade,"Update Users for a role", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.resetUsersForRole(trans, resp, req);\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
- \r
- /**\r
- * Extend Expiration Date (according to Organizational rules)\r
- */\r
- authzAPI.route(PUT, "/authz/userRole/extend/:user/:role", API.VOID, new Code(facade,"Extend Expiration", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.extendUserRoleExpiration(trans,resp,pathParam(req,":user"),pathParam(req,":role"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- \r
- }\r
- \r
- });\r
- \r
- \r
- /**\r
- * Create a new ID/Credential\r
- */\r
- authzAPI.route(DELETE,"/authz/userRole/:user/:role",API.VOID,new Code(facade,"Delete User Role", true) {\r
- @Override\r
- public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {\r
- Result<Void> r = context.deleteUserRole(trans, resp, pathParam(req,":user"),pathParam(req,":role"));\r
- switch(r.status) {\r
- case OK:\r
- resp.setStatus(HttpStatus.OK_200); \r
- break;\r
- default:\r
- context.error(trans,resp,r);\r
- }\r
- }\r
- });\r
-\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.mapper;\r
-\r
-import java.util.Collection;\r
-import java.util.List;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.service.MayChange;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.aaf.cass.ApprovalDAO;\r
-import org.onap.aaf.dao.aaf.cass.CertDAO;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.DelegateDAO;\r
-import org.onap.aaf.dao.aaf.cass.FutureDAO;\r
-import org.onap.aaf.dao.aaf.cass.HistoryDAO;\r
-import org.onap.aaf.dao.aaf.cass.Namespace;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-\r
-import org.onap.aaf.rosetta.Marshal;\r
-\r
-public interface Mapper<\r
- NSS,\r
- PERMS,\r
- PERMKEY,\r
- ROLES,\r
- USERS,\r
- USERROLES,\r
- DELGS,\r
- CERTS,\r
- KEYS,\r
- REQUEST,\r
- HISTORY,\r
- ERROR,\r
- APPROVALS>\r
-{\r
- enum API{NSS,NS_REQ, \r
- PERMS,PERM_KEY,PERM_REQ,\r
- ROLES,ROLE,ROLE_REQ,ROLE_PERM_REQ,\r
- USERS,USER_ROLE_REQ,USER_ROLES,\r
- CRED_REQ,CERTS,\r
- APPROVALS,\r
- DELGS,DELG_REQ,\r
- KEYS,\r
- HISTORY,\r
- ERROR,\r
- API,\r
- VOID};\r
- public Class<?> getClass(API api);\r
- public<A> Marshal<A> getMarshal(API api);\r
- public<A> A newInstance(API api);\r
-\r
- public Result<PermDAO.Data> permkey(AuthzTrans trans, PERMKEY from);\r
- public Result<PermDAO.Data> perm(AuthzTrans trans, REQUEST from);\r
- public Result<RoleDAO.Data> role(AuthzTrans trans, REQUEST from);\r
- public Result<Namespace> ns(AuthzTrans trans, REQUEST from);\r
- public Result<CredDAO.Data> cred(AuthzTrans trans, REQUEST from, boolean requiresPass);\r
- public Result<USERS> cred(List<CredDAO.Data> lcred, USERS to);\r
- public Result<CERTS> cert(List<CertDAO.Data> lcert, CERTS to);\r
- public Result<DelegateDAO.Data> delegate(AuthzTrans trans, REQUEST from);\r
- public Result<DELGS> delegate(List<DelegateDAO.Data> lDelg);\r
- public Result<APPROVALS> approvals(List<ApprovalDAO.Data> lAppr);\r
- public Result<List<ApprovalDAO.Data>> approvals(APPROVALS apprs);\r
- public Result<List<PermDAO.Data>> perms(AuthzTrans trans, PERMS perms);\r
- \r
- public Result<UserRoleDAO.Data> userRole(AuthzTrans trans, REQUEST from);\r
- public Result<PermDAO.Data> permFromRPRequest(AuthzTrans trans, REQUEST from);\r
- public Result<RoleDAO.Data> roleFromRPRequest(AuthzTrans trans, REQUEST from);\r
- \r
- /*\r
- * Check Requests of varying sorts for Future fields set\r
- */\r
- public Result<FutureDAO.Data> future(AuthzTrans trans, String table, REQUEST from, Bytification content, boolean enableApproval, Memo memo, MayChange mc);\r
-\r
- public Result<NSS> nss(AuthzTrans trans, Namespace from, NSS to);\r
-\r
- // Note: Prevalidate if NS given is allowed to be seen before calling\r
- public Result<NSS> nss(AuthzTrans trans, Collection<Namespace> from, NSS to);\r
-// public Result<NSS> ns_attrib(AuthzTrans trans, Set<String> from, NSS to);\r
- public Result<PERMS> perms(AuthzTrans trans, List<PermDAO.Data> from, PERMS to, boolean filter);\r
- public Result<ROLES> roles(AuthzTrans trans, List<RoleDAO.Data> from, ROLES roles, boolean filter);\r
- // Note: Prevalidate if NS given is allowed to be seen before calling\r
- public Result<USERS> users(AuthzTrans trans, Collection<UserRoleDAO.Data> from, USERS to);\r
- public Result<USERROLES> userRoles(AuthzTrans trans, Collection<UserRoleDAO.Data> from, USERROLES to);\r
- public Result<KEYS> keys(Collection<String> from);\r
-\r
- public Result<HISTORY> history(AuthzTrans trans, List<HistoryDAO.Data> history, final int sort);\r
- \r
- public ERROR errorFromMessage(StringBuilder holder, String msgID, String text, String... detail);\r
- \r
- /*\r
- * A Memo Creator... Use to avoid creating superfluous Strings until needed.\r
- */\r
- public static interface Memo {\r
- public String get();\r
- }\r
-\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.mapper;\r
-\r
-import java.nio.ByteBuffer;\r
-import java.util.ArrayList;\r
-import java.util.Collection;\r
-import java.util.Collections;\r
-import java.util.Comparator;\r
-import java.util.Date;\r
-import java.util.GregorianCalendar;\r
-import java.util.List;\r
-import java.util.UUID;\r
-\r
-import javax.xml.datatype.XMLGregorianCalendar;\r
-\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.org.Organization;\r
-import org.onap.aaf.authz.org.Organization.Expiration;\r
-import org.onap.aaf.authz.service.MayChange;\r
-import org.onap.aaf.cssa.rserv.Pair;\r
-import org.onap.aaf.dao.Bytification;\r
-import org.onap.aaf.dao.aaf.cass.ApprovalDAO;\r
-import org.onap.aaf.dao.aaf.cass.CertDAO;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.DelegateDAO;\r
-import org.onap.aaf.dao.aaf.cass.FutureDAO;\r
-import org.onap.aaf.dao.aaf.cass.HistoryDAO;\r
-import org.onap.aaf.dao.aaf.cass.Namespace;\r
-import org.onap.aaf.dao.aaf.cass.NsSplit;\r
-import org.onap.aaf.dao.aaf.cass.NsType;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.Status;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.DelegateDAO.Data;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-import org.onap.aaf.dao.aaf.hl.Question.Access;\r
-\r
-import org.onap.aaf.cadi.aaf.marshal.CertsMarshal;\r
-import org.onap.aaf.cadi.util.Vars;\r
-import org.onap.aaf.inno.env.Env;\r
-import org.onap.aaf.inno.env.TimeTaken;\r
-import org.onap.aaf.inno.env.util.Chrono;\r
-import org.onap.aaf.rosetta.Marshal;\r
-\r
-import aaf.v2_0.Api;\r
-import aaf.v2_0.Approval;\r
-import aaf.v2_0.Approvals;\r
-import aaf.v2_0.Certs;\r
-import aaf.v2_0.Certs.Cert;\r
-import aaf.v2_0.CredRequest;\r
-import aaf.v2_0.Delg;\r
-import aaf.v2_0.DelgRequest;\r
-import aaf.v2_0.Delgs;\r
-import aaf.v2_0.Error;\r
-import aaf.v2_0.History;\r
-import aaf.v2_0.History.Item;\r
-import aaf.v2_0.Keys;\r
-import aaf.v2_0.NsRequest;\r
-import aaf.v2_0.Nss;\r
-import aaf.v2_0.Nss.Ns;\r
-import aaf.v2_0.Nss.Ns.Attrib;\r
-import aaf.v2_0.Perm;\r
-import aaf.v2_0.PermKey;\r
-import aaf.v2_0.PermRequest;\r
-import aaf.v2_0.Perms;\r
-import aaf.v2_0.Pkey;\r
-import aaf.v2_0.Request;\r
-import aaf.v2_0.Role;\r
-import aaf.v2_0.RolePermRequest;\r
-import aaf.v2_0.RoleRequest;\r
-import aaf.v2_0.Roles;\r
-import aaf.v2_0.UserRole;\r
-import aaf.v2_0.UserRoleRequest;\r
-import aaf.v2_0.UserRoles;\r
-import aaf.v2_0.Users;\r
-import aaf.v2_0.Users.User;\r
-\r
-public class Mapper_2_0 implements Mapper<Nss, Perms, Pkey, Roles, Users, UserRoles, Delgs, Certs, Keys, Request, History, Error, Approvals> {\r
- private Question q;\r
-\r
- public Mapper_2_0(Question q) {\r
- this.q = q;\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.mapper.Mapper#ns(java.lang.Object, org.onap.aaf.authz.service.mapper.Mapper.Holder)\r
- */\r
- @Override\r
- public Result<Namespace> ns(AuthzTrans trans, Request base) {\r
- NsRequest from = (NsRequest)base;\r
- Namespace namespace = new Namespace();\r
- namespace.name = from.getName();\r
- namespace.admin = from.getAdmin();\r
- namespace.owner = from.getResponsible();\r
- namespace.description = from.getDescription();\r
- trans.checkpoint(namespace.name, Env.ALWAYS);\r
- \r
- NsType nt = NsType.fromString(from.getType());\r
- if(nt.equals(NsType.UNKNOWN)) {\r
- String ns = namespace.name;\r
- int count = 0;\r
- for(int i=ns.indexOf('.');\r
- i>=0;\r
- i=ns.indexOf('.',i+1)) {\r
- ++count;\r
- }\r
- switch(count) {\r
- case 0: nt = NsType.ROOT;break;\r
- case 1: nt = NsType.COMPANY;break;\r
- default: nt = NsType.APP;\r
- }\r
- }\r
- namespace.type = nt.type;\r
- \r
- return Result.ok(namespace);\r
- }\r
-\r
- @Override\r
- public Result<Nss> nss(AuthzTrans trans, Namespace from, Nss to) {\r
- List<Ns> nss = to.getNs();\r
- Ns ns = new Ns();\r
- ns.setName(from.name);\r
- if(from.admin!=null)ns.getAdmin().addAll(from.admin);\r
- if(from.owner!=null)ns.getResponsible().addAll(from.owner);\r
- if(from.attrib!=null) {\r
- for(Pair<String,String> attrib : from.attrib) {\r
- Attrib toAttrib = new Attrib();\r
- toAttrib.setKey(attrib.x);\r
- toAttrib.setValue(attrib.y);\r
- ns.getAttrib().add(toAttrib);\r
- }\r
- }\r
-\r
- ns.setDescription(from.description);\r
- nss.add(ns);\r
- return Result.ok(to);\r
- }\r
-\r
- /**\r
- * Note: Prevalidate if NS given is allowed to be seen before calling\r
- */\r
- @Override\r
- public Result<Nss> nss(AuthzTrans trans, Collection<Namespace> from, Nss to) {\r
- List<Ns> nss = to.getNs();\r
- for(Namespace nd : from) {\r
- Ns ns = new Ns();\r
- ns.setName(nd.name);\r
- ns.getAdmin().addAll(nd.admin);\r
- ns.getResponsible().addAll(nd.owner);\r
- ns.setDescription(nd.description);\r
- if(nd.attrib!=null) {\r
- for(Pair<String,String> attrib : nd.attrib) {\r
- Attrib toAttrib = new Attrib();\r
- toAttrib.setKey(attrib.x);\r
- toAttrib.setValue(attrib.y);\r
- ns.getAttrib().add(toAttrib);\r
- }\r
- }\r
-\r
- nss.add(ns);\r
- }\r
- return Result.ok(to);\r
- }\r
-\r
- @Override\r
- public Result<Perms> perms(AuthzTrans trans, List<PermDAO.Data> from, Perms to, boolean filter) {\r
- List<Perm> perms = to.getPerm();\r
- TimeTaken tt = trans.start("Filter Perms before return", Env.SUB);\r
- try {\r
- if(from!=null) {\r
- for (PermDAO.Data data : from) {\r
- if(!filter || q.mayUser(trans, trans.user(), data, Access.read).isOK()) {\r
- Perm perm = new Perm();\r
- perm.setType(data.fullType());\r
- perm.setInstance(data.instance);\r
- perm.setAction(data.action);\r
- for(String role : data.roles(false)) {\r
- perm.getRoles().add(role);\r
- }\r
- perm.setDescription(data.description);\r
- perms.add(perm);\r
- }\r
- }\r
- }\r
- } finally {\r
- tt.done();\r
- }\r
- \r
- tt = trans.start("Sort Perms", Env.SUB);\r
- try {\r
- Collections.sort(perms, new Comparator<Perm>() {\r
- @Override\r
- public int compare(Perm perm1, Perm perm2) {\r
- int typeCompare = perm1.getType().compareToIgnoreCase(perm2.getType());\r
- if (typeCompare == 0) {\r
- int instanceCompare = perm1.getInstance().compareToIgnoreCase(perm2.getInstance());\r
- if (instanceCompare == 0) {\r
- return perm1.getAction().compareToIgnoreCase(perm2.getAction());\r
- }\r
- return instanceCompare;\r
- }\r
- return typeCompare;\r
- } \r
- });\r
- } finally {\r
- tt.done();\r
- }\r
- return Result.ok(to);\r
- }\r
- \r
- @Override\r
- public Result<List<PermDAO.Data>> perms(AuthzTrans trans, Perms perms) {\r
- List<PermDAO.Data> lpd = new ArrayList<PermDAO.Data>();\r
- for (Perm p : perms.getPerm()) {\r
- Result<NsSplit> nss = q.deriveNsSplit(trans, p.getType());\r
- PermDAO.Data pd = new PermDAO.Data();\r
- if(nss.isOK()) { \r
- pd.ns=nss.value.ns;\r
- pd.type = nss.value.name;\r
- pd.instance = p.getInstance();\r
- pd.action = p.getAction();\r
- for (String role : p.getRoles())\r
- pd.roles(true).add(role);\r
- lpd.add(pd);\r
- } else {\r
- return Result.err(nss);\r
- }\r
- }\r
- return Result.ok(lpd);\r
- }\r
-\r
- @Override\r
- public Result<PermDAO.Data> permkey(AuthzTrans trans, Pkey from) {\r
- return q.permFrom(trans, from.getType(),from.getInstance(),from.getAction());\r
- }\r
- \r
- @Override\r
- public Result<PermDAO.Data> permFromRPRequest(AuthzTrans trans, Request req) {\r
- RolePermRequest from = (RolePermRequest)req;\r
- Pkey perm = from.getPerm();\r
- if(perm==null)return Result.err(Status.ERR_NotFound, "Permission not found");\r
- Result<NsSplit> nss = q.deriveNsSplit(trans, perm.getType());\r
- PermDAO.Data pd = new PermDAO.Data();\r
- if(nss.isOK()) { \r
- pd.ns=nss.value.ns;\r
- pd.type = nss.value.name;\r
- pd.instance = from.getPerm().getInstance();\r
- pd.action = from.getPerm().getAction();\r
- trans.checkpoint(pd.fullPerm(), Env.ALWAYS);\r
- \r
- String[] roles = {};\r
- \r
- if (from.getRole() != null) {\r
- roles = from.getRole().split(",");\r
- }\r
- for (String role : roles) { \r
- pd.roles(true).add(role);\r
- }\r
- return Result.ok(pd);\r
- } else {\r
- return Result.err(nss);\r
- }\r
- }\r
- \r
- @Override\r
- public Result<RoleDAO.Data> roleFromRPRequest(AuthzTrans trans, Request req) {\r
- RolePermRequest from = (RolePermRequest)req;\r
- Result<NsSplit> nss = q.deriveNsSplit(trans, from.getRole());\r
- RoleDAO.Data rd = new RoleDAO.Data();\r
- if(nss.isOK()) { \r
- rd.ns = nss.value.ns;\r
- rd.name = nss.value.name;\r
- trans.checkpoint(rd.fullName(), Env.ALWAYS);\r
- return Result.ok(rd);\r
- } else {\r
- return Result.err(nss);\r
- }\r
- }\r
- \r
- @Override\r
- public Result<PermDAO.Data> perm(AuthzTrans trans, Request req) {\r
- PermRequest from = (PermRequest)req;\r
- Result<NsSplit> nss = q.deriveNsSplit(trans, from.getType());\r
- PermDAO.Data pd = new PermDAO.Data();\r
- if(nss.isOK()) { \r
- pd.ns=nss.value.ns;\r
- pd.type = nss.value.name;\r
- pd.instance = from.getInstance();\r
- pd.action = from.getAction();\r
- pd.description = from.getDescription();\r
- trans.checkpoint(pd.fullPerm(), Env.ALWAYS);\r
- return Result.ok(pd);\r
- } else {\r
- return Result.err(nss);\r
- }\r
- }\r
-\r
- @Override\r
- public Result<RoleDAO.Data> role(AuthzTrans trans, Request base) {\r
- RoleRequest from = (RoleRequest)base;\r
- Result<NsSplit> nss = q.deriveNsSplit(trans, from.getName());\r
- if(nss.isOK()) {\r
- RoleDAO.Data to = new RoleDAO.Data();\r
- to.ns = nss.value.ns;\r
- to.name = nss.value.name;\r
- to.description = from.getDescription();\r
- trans.checkpoint(to.fullName(), Env.ALWAYS);\r
-\r
- return Result.ok(to);\r
- } else {\r
- return Result.err(nss);\r
- }\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.mapper.Mapper#roles(java.util.List)\r
- */\r
- @Override\r
- public Result<Roles> roles(AuthzTrans trans, List<RoleDAO.Data> from, Roles to, boolean filter) {\r
- for(RoleDAO.Data frole : from) {\r
- // Only Add Data to view if User is allowed to see this Role \r
- //if(!filter || q.mayUserViewRole(trans, trans.user(), frole).isOK()) {\r
- if(!filter || q.mayUser(trans, trans.user(), frole,Access.read).isOK()) {\r
- Role role = new Role();\r
- role.setName(frole.ns + '.' + frole.name);\r
- role.setDescription(frole.description);\r
- for(String p : frole.perms(false)) { // can see any Perms in the Role he has permission for\r
- Result<String[]> rpa = PermDAO.Data.decodeToArray(trans,q,p);\r
- if(rpa.notOK()) return Result.err(rpa);\r
- \r
- String[] pa = rpa.value;\r
- Pkey pKey = new Pkey();\r
- pKey.setType(pa[0]+'.'+pa[1]);\r
- pKey.setInstance(pa[2]);\r
- pKey.setAction(pa[3]);\r
- role.getPerms().add(pKey);\r
- }\r
- to.getRole().add(role);\r
- }\r
- }\r
- return Result.ok(to);\r
- }\r
-\r
- /*\r
- * (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.mapper.Mapper#users(java.util.Collection, java.lang.Object)\r
- * \r
- * Note: Prevalidate all data for permission to view\r
- */\r
- @Override\r
- public Result<Users> users(AuthzTrans trans, Collection<UserRoleDAO.Data> from, Users to) {\r
- List<User> cu = to.getUser();\r
- for(UserRoleDAO.Data urd : from) {\r
- User user = new User();\r
- user.setId(urd.user);\r
- user.setExpires(Chrono.timeStamp(urd.expires));\r
- cu.add(user);\r
- }\r
- return Result.ok(to);\r
- }\r
-\r
- /*\r
- * (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.mapper.Mapper#users(java.util.Collection, java.lang.Object)\r
- * \r
- * Note: Prevalidate all data for permission to view\r
- */\r
- @Override\r
- public Result<UserRoles> userRoles(AuthzTrans trans, Collection<UserRoleDAO.Data> from, UserRoles to) {\r
- List<UserRole> cu = to.getUserRole();\r
- for(UserRoleDAO.Data urd : from) {\r
- UserRole ur = new UserRole();\r
- ur.setUser(urd.user);\r
- ur.setRole(urd.role);\r
- ur.setExpires(Chrono.timeStamp(urd.expires));\r
- cu.add(ur);\r
- }\r
- return Result.ok(to);\r
- }\r
-\r
- /**\r
- * \r
- * @param base\r
- * @param start\r
- * @return\r
- */\r
- @Override\r
- public Result<UserRoleDAO.Data> userRole(AuthzTrans trans, Request base) {\r
- try {\r
- UserRoleRequest from = (UserRoleRequest)base;\r
-\r
- // Setup UserRoleData, either for immediate placement, or for future\r
- UserRoleDAO.Data to = new UserRoleDAO.Data();\r
- if (from.getUser() != null) {\r
- String user = from.getUser();\r
- to.user = user;\r
- }\r
- if (from.getRole() != null) {\r
- to.role(trans,q,from.getRole());\r
- }\r
- to.expires = getExpires(trans.org(),Expiration.UserInRole,base,from.getUser());\r
- trans.checkpoint(to.toString(), Env.ALWAYS);\r
-\r
- return Result.ok(to);\r
- } catch (Exception t) {\r
- return Result.err(Status.ERR_BadData,t.getMessage());\r
- }\r
- }\r
-\r
- @Override\r
- public Result<CredDAO.Data> cred(AuthzTrans trans, Request base, boolean requiresPass) {\r
- CredRequest from = (CredRequest)base;\r
- CredDAO.Data to = new CredDAO.Data();\r
- to.id=from.getId();\r
- to.ns = Question.domain2ns(to.id);\r
- String passwd = from.getPassword();\r
- if(requiresPass) {\r
- String ok = trans.org().isValidPassword(to.id,passwd);\r
- if(ok.length()>0) {\r
- return Result.err(Status.ERR_BadData,ok);\r
- }\r
-\r
- } else {\r
- to.type=0;\r
- }\r
- if(passwd != null) {\r
- to.cred = ByteBuffer.wrap(passwd.getBytes());\r
- to.type = CredDAO.RAW; \r
- } else {\r
- to.type = 0;\r
- }\r
- \r
- // Note: Ensure requested EndDate created will match Organization Password Rules\r
- // P.S. Do not apply TempPassword rule here. Do that when you know you are doing a Create/Reset (see Service)\r
- to.expires = getExpires(trans.org(),Expiration.Password,base,from.getId());\r
- trans.checkpoint(to.id, Env.ALWAYS);\r
-\r
- return Result.ok(to);\r
- }\r
- \r
- @Override\r
- public Result<Users> cred(List<CredDAO.Data> from, Users to) {\r
- List<User> cu = to.getUser();\r
- for(CredDAO.Data cred : from) {\r
- User user = new User();\r
- user.setId(cred.id);\r
- user.setExpires(Chrono.timeStamp(cred.expires));\r
- user.setType(cred.type);\r
- cu.add(user);\r
- }\r
- return Result.ok(to);\r
- }\r
- \r
-@Override\r
- public Result<Certs> cert(List<CertDAO.Data> from, Certs to) {\r
- List<Cert> lc = to.getCert();\r
- for(CertDAO.Data fcred : from) {\r
- Cert cert = new Cert();\r
- cert.setId(fcred.id);\r
- cert.setX500(fcred.x500);\r
- /**TODO - change Interface \r
- * @deprecated */\r
- cert.setFingerprint(fcred.serial.toByteArray());\r
- lc.add(cert);\r
- }\r
- return Result.ok(to);\r
- }\r
-\r
- /**\r
- * Analyze whether Requests should be acted on now, or in the future, based on Start Date, and whether the requester\r
- * is allowed to change this value directly\r
- * \r
- * Returning Result.OK means it should be done in the future.\r
- * Returning Result.ACC_Now means to act on table change now.\r
- */\r
- @Override\r
- public Result<FutureDAO.Data> future(AuthzTrans trans, String table, Request from, \r
- Bytification content, boolean enableApproval, Memo memo, MayChange mc) {\r
- Result<?> rMayChange = mc.mayChange();\r
- boolean needsAppr;\r
- if(needsAppr = rMayChange.notOK()) {\r
- if(enableApproval) {\r
- if(!trans.futureRequested()) {\r
- return Result.err(rMayChange);\r
- }\r
- } else {\r
- return Result.err(rMayChange);\r
- }\r
- }\r
- GregorianCalendar now = new GregorianCalendar(); \r
- GregorianCalendar start = from.getStart()==null?now:from.getStart().toGregorianCalendar();\r
- \r
- GregorianCalendar expires = trans.org().expiration(start, Expiration.Future);\r
- XMLGregorianCalendar xgc;\r
- if((xgc=from.getEnd())!=null) {\r
- GregorianCalendar fgc = xgc.toGregorianCalendar();\r
- expires = expires.before(fgc)?expires:fgc; // Min of desired expiration, and Org expiration\r
- }\r
- \r
- //TODO needs two answers from this. What's the NSS, and may Change.\r
- FutureDAO.Data fto;\r
- if(start.after(now) || needsAppr ) {\r
- //String user = trans.user();\r
- fto = new FutureDAO.Data();\r
- fto.target=table;\r
- fto.memo = memo.get();\r
- fto.start = start.getTime();\r
- fto.expires = expires.getTime();\r
- if(needsAppr) { // Need to add Approvers...\r
- /*\r
- Result<Data> rslt = mc.getNsd();\r
- if(rslt.notOKorIsEmpty())return Result.err(rslt);\r
- appr.addAll(mc.getNsd().value.responsible);\r
- try {\r
- //Note from 2013 Is this getting Approvers for user only? What about Delegates?\r
- // 3/25/2014. Approvers are set by Corporate policy. We don't have to worry here about what that means.\r
- // It is important to get Delegates, if necessary, at notification time\r
- // If we add delegates now, it will get all confused as to who is actually responsible.\r
- for(Organization.User ou : org.getApprovers(trans, user)) {\r
- appr.add(ou.email);\r
- }\r
- } catch (Exception e) {\r
- return Result.err(Status.ERR_Policy,org.getName() + " did not respond with Approvers: " + e.getLocalizedMessage());\r
- }\r
- */\r
- }\r
- try {\r
- fto.construct = content.bytify();\r
- } catch (Exception e) {\r
- return Result.err(Status.ERR_BadData,"Data cannot be saved for Future.");\r
- }\r
- } else {\r
- return Result.err(Status.ACC_Now, "Make Data changes now.");\r
- }\r
- return Result.ok(fto);\r
- }\r
-\r
-\r
- /* (non-Javadoc)\r
- * @see org.onap.aaf.authz.service.mapper.Mapper#history(java.util.List)\r
- */\r
- @Override\r
- public Result<History> history(AuthzTrans trans, List<HistoryDAO.Data> history, final int sort) {\r
- History hist = new History();\r
- List<Item> items = hist.getItem();\r
- for(HistoryDAO.Data data : history) {\r
- History.Item item = new History.Item();\r
- item.setYYYYMM(Integer.toString(data.yr_mon));\r
- Date date = Chrono.uuidToDate(data.id);\r
- item.setTimestamp(Chrono.timeStamp(date));\r
- item.setAction(data.action);\r
- item.setMemo(data.memo);\r
- item.setSubject(data.subject);\r
- item.setTarget(data.target);\r
- item.setUser(data.user);\r
- items.add(item);\r
- }\r
- \r
- if(sort != 0) {\r
- TimeTaken tt = trans.start("Sort ", Env.SUB);\r
- try {\r
- java.util.Collections.sort(items, new Comparator<Item>() {\r
- @Override\r
- public int compare(Item o1, Item o2) {\r
- return sort*(o1.getTimestamp().compare(o2.getTimestamp()));\r
- }\r
- });\r
- } finally {\r
- tt.done();\r
- }\r
- }\r
- return Result.ok(hist);\r
- }\r
-\r
- @Override\r
- public Error errorFromMessage(StringBuilder holder, String msgID, String text, String... var) {\r
- Error err = new Error();\r
- err.setMessageId(msgID);\r
- // AT&T Restful Error Format requires numbers "%" placements\r
- err.setText(Vars.convert(holder, text, var));\r
- for(String s : var) {\r
- err.getVariables().add(s);\r
- }\r
- return err;\r
- }\r
- \r
- @Override\r
- public Class<?> getClass(API api) {\r
- switch(api) {\r
- case NSS: return Nss.class;\r
- case NS_REQ: return NsRequest.class;\r
- case PERMS: return Perms.class;\r
- case PERM_KEY: return PermKey.class;\r
- case ROLES: return Roles.class;\r
- case ROLE: return Role.class;\r
- case USERS: return Users.class;\r
- case DELGS: return Delgs.class;\r
- case CERTS: return Certs.class;\r
- case DELG_REQ: return DelgRequest.class;\r
- case PERM_REQ: return PermRequest.class;\r
- case ROLE_REQ: return RoleRequest.class;\r
- case CRED_REQ: return CredRequest.class;\r
- case USER_ROLE_REQ: return UserRoleRequest.class;\r
- case USER_ROLES: return UserRoles.class;\r
- case ROLE_PERM_REQ: return RolePermRequest.class;\r
- case APPROVALS: return Approvals.class;\r
- case KEYS: return Keys.class;\r
- case HISTORY: return History.class;\r
-// case MODEL: return Model.class;\r
- case ERROR: return Error.class;\r
- case API: return Api.class;\r
- case VOID: return Void.class;\r
- }\r
- return null;\r
- }\r
-\r
- @SuppressWarnings("unchecked")\r
- @Override\r
- public <A> A newInstance(API api) {\r
- switch(api) {\r
- case NS_REQ: return (A) new NsRequest();\r
- case NSS: return (A) new Nss();\r
- case PERMS: return (A)new Perms();\r
- case PERM_KEY: return (A)new PermKey();\r
- case ROLES: return (A)new Roles();\r
- case ROLE: return (A)new Role();\r
- case USERS: return (A)new Users();\r
- case DELGS: return (A)new Delgs();\r
- case CERTS: return (A)new Certs();\r
- case PERM_REQ: return (A)new PermRequest();\r
- case CRED_REQ: return (A)new CredRequest();\r
- case ROLE_REQ: return (A)new RoleRequest();\r
- case USER_ROLE_REQ: return (A)new UserRoleRequest();\r
- case USER_ROLES: return (A)new UserRoles();\r
- case ROLE_PERM_REQ: return (A)new RolePermRequest();\r
- case HISTORY: return (A)new History();\r
- case KEYS: return (A)new Keys();\r
- //case MODEL: return (A)new Model();\r
- case ERROR: return (A)new Error();\r
- case API: return (A)new Api();\r
- case VOID: return null;\r
- \r
- case APPROVALS: return (A) new Approvals();\r
- case DELG_REQ: return (A) new DelgRequest();\r
- }\r
- return null;\r
- }\r
- \r
- @SuppressWarnings("unchecked")\r
- /**\r
- * Get Typed Marshaler as they are defined\r
- * \r
- * @param api\r
- * @return\r
- */\r
- public <A> Marshal<A> getMarshal(API api) {\r
- switch(api) {\r
- case CERTS: return (Marshal<A>) new CertsMarshal();\r
- default:\r
- return null;\r
- }\r
- }\r
-\r
- @Override\r
- public Result<Approvals> approvals(List<ApprovalDAO.Data> lAppr) {\r
- Approvals apprs = new Approvals();\r
- List<Approval> lappr = apprs.getApprovals();\r
- Approval a;\r
- for(ApprovalDAO.Data appr : lAppr) {\r
- a = new Approval();\r
- a.setId(appr.id.toString());\r
- a.setTicket(appr.ticket.toString());\r
- a.setUser(appr.user);\r
- a.setApprover(appr.approver);\r
- a.setType(appr.type);\r
- a.setStatus(appr.status);\r
- a.setMemo(appr.memo);\r
- a.setOperation(appr.operation);\r
- a.setUpdated(Chrono.timeStamp(appr.updated));\r
- lappr.add(a);\r
- }\r
- return Result.ok(apprs);\r
- }\r
- \r
- @Override\r
- public Result<List<ApprovalDAO.Data>> approvals(Approvals apprs) {\r
- List<ApprovalDAO.Data> lappr = new ArrayList<ApprovalDAO.Data>();\r
- for(Approval a : apprs.getApprovals()) {\r
- ApprovalDAO.Data ad = new ApprovalDAO.Data();\r
- String str = a.getId();\r
- if(str!=null)ad.id=UUID.fromString(str);\r
- str = a.getTicket();\r
- if(str!=null)ad.ticket=UUID.fromString(str);\r
- ad.user=a.getUser();\r
- ad.approver=a.getApprover();\r
- ad.type=a.getType();\r
- ad.status=a.getStatus();\r
- ad.operation=a.getOperation();\r
- ad.memo=a.getMemo();\r
- \r
- XMLGregorianCalendar xgc = a.getUpdated();\r
- if(xgc!=null)ad.updated=xgc.toGregorianCalendar().getTime();\r
- lappr.add(ad);\r
- }\r
- return Result.ok(lappr);\r
- }\r
-\r
- @Override\r
- public Result<Delgs> delegate(List<DelegateDAO.Data> lDelg) {\r
- Delgs delgs = new Delgs();\r
- List<Delg> ldelg = delgs.getDelgs();\r
- Delg d;\r
- for(DelegateDAO.Data del: lDelg) {\r
- d = new Delg();\r
- d.setUser(del.user);\r
- d.setDelegate(del.delegate);\r
- if(del.expires!=null)d.setExpires(Chrono.timeStamp(del.expires));\r
- ldelg.add(d);\r
- }\r
- return Result.ok(delgs);\r
- }\r
-\r
- @Override\r
- public Result<Data> delegate(AuthzTrans trans, Request base) {\r
- try {\r
- DelgRequest from = (DelgRequest)base;\r
- DelegateDAO.Data to = new DelegateDAO.Data();\r
- String user = from.getUser();\r
- to.user = user;\r
- String delegate = from.getDelegate();\r
- to.delegate = delegate;\r
- to.expires = getExpires(trans.org(),Expiration.UserDelegate,base,from.getUser());\r
- trans.checkpoint(to.user+"=>"+to.delegate, Env.ALWAYS);\r
-\r
- return Result.ok(to);\r
- } catch (Exception t) {\r
- return Result.err(Status.ERR_BadData,t.getMessage());\r
- }\r
- }\r
-\r
- /*\r
- * We want "Expired" dates to start at a specified time set by the Organization, and consistent wherever\r
- * the date is created from.\r
- */ \r
- private Date getExpires(Organization org, Expiration exp, Request base, String id) {\r
- XMLGregorianCalendar end = base.getEnd();\r
- GregorianCalendar gc = end==null?new GregorianCalendar():end.toGregorianCalendar();\r
- GregorianCalendar orggc;\r
- orggc = org.expiration(gc,exp,id); \r
-\r
- // We'll choose the lesser of dates to ensure Policy Compliance...\r
- \r
- GregorianCalendar endgc = end==null||gc.after(orggc)?orggc:gc;\r
- // Allow the Organization to determine when official "day Start" begins, Specifically when to consider something Expired.\r
- endgc = Chrono.firstMomentOfDay(endgc);\r
- endgc.set(GregorianCalendar.HOUR_OF_DAY, org.startOfDay());\r
- return endgc.getTime();\r
- }\r
-\r
-\r
- @Override\r
- public Result<Keys> keys(Collection<String> from) {\r
- Keys keys = new Keys();\r
- keys.getKey().addAll(from);\r
- return Result.ok(keys).emptyList(from.isEmpty());\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.validation;\r
-\r
-import java.util.regex.Pattern;\r
-\r
-import org.onap.aaf.authz.cadi.DirectAAFLur.PermPermission;\r
-import org.onap.aaf.authz.env.AuthzTrans;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.authz.org.Organization;\r
-import org.onap.aaf.dao.aaf.cass.CredDAO;\r
-import org.onap.aaf.dao.aaf.cass.DelegateDAO;\r
-import org.onap.aaf.dao.aaf.cass.Namespace;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-import org.onap.aaf.dao.aaf.cass.UserRoleDAO;\r
-\r
-/**\r
- * Validator\r
- * Consistently apply content rules for content (incoming)\r
- * \r
- * Note: We restrict content for usability in URLs (because RESTful service), and avoid \r
- * issues with Regular Expressions, and other enabling technologies. \r
- *\r
- */\r
-public class Validator {\r
- // % () ,-. 0-9 =A-Z _a-z\r
- private static final String ESSENTIAL="\\x25\\x28\\x29\\x2C-\\x2E\\x30-\\x39\\x3D\\x40-\\x5A\\x5F\\x61-\\x7A";\r
- private static final Pattern ESSENTIAL_CHARS=Pattern.compile("["+ESSENTIAL+"]+");\r
- \r
- // Must be 1 or more of Alphanumeric or the following :._-\r
- // '*' only allowed when it is the only character, or the only element in a key separator\r
- // :* :hello:* :hello:*:there etc\r
- public static final Pattern ACTION_CHARS=Pattern.compile(\r
- "["+ESSENTIAL+"]+" + // All AlphaNumeric+\r
- "|\\*" // Just Star\r
- );\r
-\r
- public static final Pattern INST_CHARS=Pattern.compile(\r
- "["+ESSENTIAL+"]+[\\*]*" + // All AlphaNumeric+ possibly ending with *\r
- "|\\*" + // Just Star\r
- "|(([:/]\\*)|([:/][!]{0,1}["+ESSENTIAL+"]+[\\*]*[:/]*))+" // Key :asdf:*:sdf*:sdk\r
- );\r
- \r
- // Must be 1 or more of Alphanumeric or the following ._-, and be in the form id@domain\r
- public static final Pattern ID_CHARS=Pattern.compile("[\\w.-]+@[\\w.-]+");\r
- // Must be 1 or more of Alphanumeric or the following ._-\r
- public static final Pattern NAME_CHARS=Pattern.compile("[\\w.-]+");\r
- \r
- private final Pattern actionChars;\r
- private final Pattern instChars;\r
- private StringBuilder msgs;\r
-\r
- /**\r
- * Default Validator does not check for non-standard Action/Inst chars\r
- * \r
- * \r
- * IMPORTANT: Use ONLY when the Validator is doing something simple... NullOrBlank\r
- */\r
- public Validator() {\r
- actionChars = ACTION_CHARS;\r
- instChars = INST_CHARS;\r
- }\r
- \r
- /**\r
- * When Trans is passed in, check for non-standard Action/Inst chars\r
- * \r
- * This is an opportunity to change characters, if required.\r
- * \r
- * Use for any Object method passed (i.e. role(RoleDAO.Data d) ), to ensure fewer bugs.\r
- * \r
- * @param trans\r
- */\r
- public Validator(AuthzTrans trans) {\r
- actionChars = ACTION_CHARS;\r
- instChars = INST_CHARS;\r
- }\r
-\r
-\r
- public Validator perm(Result<PermDAO.Data> rpd) {\r
- if(rpd.notOK()) {\r
- msg(rpd.details);\r
- } else {\r
- perm(rpd.value);\r
- }\r
- return this;\r
- }\r
-\r
-\r
- public Validator perm(PermDAO.Data pd) {\r
- if(pd==null) {\r
- msg("Perm Data is null.");\r
- } else {\r
- ns(pd.ns);\r
- permType(pd.type,pd.ns);\r
- permInstance(pd.instance);\r
- permAction(pd.action);\r
- if(pd.roles!=null) { \r
- for(String role : pd.roles) {\r
- role(role);\r
- }\r
- }\r
- }\r
- return this;\r
- }\r
-\r
- public Validator role(Result<RoleDAO.Data> rrd) {\r
- if(rrd.notOK()) {\r
- msg(rrd.details);\r
- } else {\r
- role(rrd.value);\r
- }\r
- return this;\r
- }\r
-\r
- public Validator role(RoleDAO.Data pd) {\r
- if(pd==null) {\r
- msg("Role Data is null.");\r
- } else {\r
- ns(pd.ns);\r
- role(pd.name);\r
- if(pd.perms!=null) {\r
- for(String perm : pd.perms) {\r
- String[] ps = perm.split("\\|");\r
- if(ps.length!=3) {\r
- msg("Perm [" + perm + "] in Role [" + pd.fullName() + "] is not correctly separated with '|'");\r
- } else {\r
- permType(ps[0],null);\r
- permInstance(ps[1]);\r
- permAction(ps[2]);\r
- }\r
- }\r
- }\r
- }\r
- return this;\r
- }\r
-\r
- public Validator delegate(Organization org, Result<DelegateDAO.Data> rdd) {\r
- if(rdd.notOK()) {\r
- msg(rdd.details);\r
- } else {\r
- delegate(org, rdd.value);\r
- }\r
- return this;\r
- }\r
-\r
- public Validator delegate(Organization org, DelegateDAO.Data dd) {\r
- if(dd==null) {\r
- msg("Delegate Data is null.");\r
- } else {\r
- user(org,dd.user);\r
- user(org,dd.delegate);\r
- }\r
- return this;\r
- }\r
-\r
-\r
- public Validator cred(Organization org, Result<CredDAO.Data> rcd, boolean isNew) {\r
- if(rcd.notOK()) {\r
- msg(rcd.details);\r
- } else {\r
- cred(org,rcd.value,isNew);\r
- }\r
- return this;\r
- }\r
-\r
- public Validator cred(Organization org, CredDAO.Data cd, boolean isNew) {\r
- if(cd==null) {\r
- msg("Cred Data is null.");\r
- } else {\r
- if(nob(cd.id,ID_CHARS)) {\r
- msg("ID [" + cd.id + "] is invalid");\r
- }\r
- if(!org.isValidCred(cd.id)) {\r
- msg("ID [" + cd.id + "] is invalid for a cred");\r
- }\r
- String str = cd.id;\r
- int idx = str.indexOf('@');\r
- if(idx>0) {\r
- str = str.substring(0,idx);\r
- }\r
- \r
- if(cd.id.endsWith(org.getRealm())) {\r
- if(isNew && (str=org.isValidID(str)).length()>0) {\r
- msg(cd.id,str);\r
- }\r
- }\r
- \r
- if(cd.type==null) {\r
- msg("Credential Type must be set");\r
- } else {\r
- switch(cd.type) {\r
- case CredDAO.BASIC_AUTH_SHA256:\r
- // ok\r
- break;\r
- default:\r
- msg("Credential Type [",Integer.toString(cd.type),"] is invalid");\r
- }\r
- }\r
- }\r
- return this;\r
- }\r
-\r
-\r
- public Validator user(Organization org, String user) {\r
- if(nob(user,ID_CHARS)) {\r
- msg("User [",user,"] is invalid.");\r
- }\r
- //TODO Change when Multi-Org solution is created\r
-// if(org instanceof ATT) {\r
-// if(!user.endsWith("@csp.att.com") &&\r
-// !org.isValidCred(user)) \r
-// msg("User [",user,"] is not valid ID for Credential in ",org.getRealm());\r
-// }\r
- return this;\r
- }\r
-\r
- public Validator ns(Result<Namespace> nsd) {\r
- notOK(nsd);\r
- ns(nsd.value.name);\r
- for(String s : nsd.value.admin) {\r
- if(nob(s,ID_CHARS)) {\r
- msg("Admin [" + s + "] is invalid."); \r
- }\r
- \r
- }\r
- for(String s : nsd.value.owner) {\r
- if(nob(s,ID_CHARS)) {\r
- msg("Responsible [" + s + "] is invalid."); \r
- }\r
- \r
- }\r
- return this;\r
- }\r
-\r
-\r
- public Validator ns(String ns) {\r
- if(nob(ns,NAME_CHARS)){\r
- msg("NS [" + ns + "] is invalid.");\r
- }\r
- return this;\r
- }\r
-\r
- public String errs() {\r
- return msgs.toString();\r
- }\r
-\r
-\r
- public Validator permType(String type, String ns) {\r
- // TODO check for correct Splits? Type|Instance|Action ?\r
- if(nob(type,NAME_CHARS)) {\r
- msg("Perm Type [" + (ns==null?"":ns+(type.length()==0?"":'.'))+type + "] is invalid.");\r
- }\r
- return this;\r
- }\r
-\r
- public Validator permInstance(String instance) {\r
- // TODO check for correct Splits? Type|Instance|Action ?\r
- if(nob(instance,instChars)) {\r
- msg("Perm Instance [" + instance + "] is invalid.");\r
- }\r
- return this;\r
- }\r
-\r
- public Validator permAction(String action) {\r
- // TODO check for correct Splits? Type|Instance|Action ?\r
- if(nob(action, actionChars)) {\r
- msg("Perm Action [" + action + "] is invalid.");\r
- }\r
- return this;\r
- }\r
-\r
- public Validator role(String role) {\r
- if(nob(role, NAME_CHARS)) {\r
- msg("Role [" + role + "] is invalid.");\r
- }\r
- return this;\r
- }\r
-\r
- public Validator user_role(UserRoleDAO.Data urdd) {\r
- if(urdd==null) {\r
- msg("UserRole is null");\r
- } else {\r
- role(urdd.role);\r
- nullOrBlank("UserRole.ns",urdd.ns);\r
- nullOrBlank("UserRole.rname",urdd.rname);\r
- }\r
- return this;\r
- }\r
-\r
- public Validator nullOrBlank(String name, String str) {\r
- if(str==null) {\r
- msg(name + " is null.");\r
- } else if(str.length()==0) {\r
- msg(name + " is blank.");\r
- }\r
- return this;\r
- }\r
- \r
- public Validator nullOrBlank(PermDAO.Data pd) {\r
- if(pd==null) {\r
- msg("Permission is null");\r
- } else {\r
- nullOrBlank("NS",pd.ns).\r
- nullOrBlank("Type",pd.type).\r
- nullOrBlank("Instance",pd.instance).\r
- nullOrBlank("Action",pd.action);\r
- }\r
- return this;\r
- }\r
-\r
- public Validator nullOrBlank(RoleDAO.Data rd) {\r
- if(rd==null) {\r
- msg("Role is null");\r
- } else {\r
- nullOrBlank("NS",rd.ns).\r
- nullOrBlank("Name",rd.name);\r
- }\r
- return this;\r
- }\r
-\r
- // nob = Null Or Not match Pattern\r
- private boolean nob(String str, Pattern p) {\r
- return str==null || !p.matcher(str).matches(); \r
- }\r
-\r
- private void msg(String ... strs) {\r
- if(msgs==null) {\r
- msgs=new StringBuilder();\r
- }\r
- for(String str : strs) {\r
- msgs.append(str);\r
- }\r
- msgs.append('\n');\r
- }\r
- \r
- public boolean err() {\r
- return msgs!=null;\r
- }\r
-\r
-\r
- public Validator notOK(Result<?> res) {\r
- if(res==null) {\r
- msgs.append("Result object is blank");\r
- } else if(res.notOK()) {\r
- msgs.append(res.getClass().getSimpleName() + " is not OK");\r
- }\r
- return this;\r
- }\r
-\r
- public Validator key(String key) {\r
- if(nob(key,NAME_CHARS)) {\r
- msg("NS Prop Key [" + key + "] is invalid");\r
- }\r
- return this;\r
- }\r
- \r
- public Validator value(String value) {\r
- if(nob(value,ESSENTIAL_CHARS)) {\r
- msg("NS Prop value [" + value + "] is invalid");\r
- }\r
- return this;\r
- }\r
-\r
-\r
-}\r
+++ /dev/null
-#
-#Wed Nov 30 23:48:45 EST 2016
-alcdtl15rj6015,60498=latitude\=32.78014;longitude\=-96.800451;lease\=1480372013837;protocol\=http;contextPath\=/;routeOffer\=BAU_SE
-ALCDTL46RJ6015,55998=latitude\=32.78014;longitude\=-96.800451;lease\=1479687428093;protocol\=http;contextPath\=/;routeOffer\=BAU_SE
-localhost,42246=latitude\=32.78014;longitude\=-96.800451;lease\=1478985613892;protocol\=http;contextPath\=/;routeOffer\=BAU_SE
-localhost,39157=latitude\=32.78014;longitude\=-96.800451;lease\=1478811101528;protocol\=http;contextPath\=/;routeOffer\=BAU_SE
-alcdtl15rj6015,55889=latitude\=32.78014;longitude\=-96.800451;lease\=1480371829514;protocol\=http;contextPath\=/;routeOffer\=BAU_SE
-localhost,36473=latitude\=32.78014;longitude\=-96.800451;lease\=1478801682319;protocol\=http;contextPath\=/;routeOffer\=BAU_SE
+++ /dev/null
-USE authz;
-
-// Create Root pass
-INSERT INTO cred (id,ns,type,cred,expires)
- VALUES ('dgl@openecomp.org','org.openecomp',1,0xab3831f27b39d7a039f9a92aa2bbfe51,'2020-12-31');
-
-INSERT INTO cred (id,ns,type,cred,expires)
- VALUES ('m99751@dmaapBC.openecomp.org','org.openecomp.dmaapBC',1,0xab3831f27b39d7a039f9a92aa2bbfe51,'2020-12-31');
-
-INSERT INTO cred (id,ns,type,cred,expires)
- VALUES ('m99501@dmaapBC.openecomp.org','org.openecomp.dmaapBC',1,0xab3831f27b39d7a039f9a92aa2bbfe51,'2020-12-31');
-
-
-// Create 'com' root NS
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('com',1,'Root Namespace',null,1);
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('com','admin',{'com.access|*|*'},'Com Admins');
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('com','owner',{'com.access|*|read'},'Com Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('com','access','*','read',{'com.owner'},'Com Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('com','access','*','*',{'com.admin'},'Com Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.owner','2020-12-31','com','owner');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.admin','2020-12-31','com','admin');
-
-// Create org root NS
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('org',1,'Root Namespace Org',null,1);
-
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('org.openecomp.dcae',3,'DCAE Namespace Org','org.openecomp',3);
-
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('org.openecomp.dmaapBC',3,'DMaaP BC Namespace Org','org.openecomp',3);
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('org','admin',{'org.access|*|*'},'Com Admins');
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('org','owner',{'org.access|*|read'},'Com Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org','access','*','read',{'org.owner'},'Com Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org','access','*','*',{'org.admin'},'Com Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','org.owner','2020-12-31','org','owner');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','org.admin','2020-12-31','org','admin');
-
-
-// Create com.att
-
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('com.att',2,'AT&T Namespace','com',2);
-
-INSERT INTO role(ns, name, perms,description)
- VALUES('com.att','admin',{'com.att.access|*|*'},'AT&T Admins');
-
-INSERT INTO role(ns, name, perms,description)
- VALUES('com.att','owner',{'com.att.access|*|read'},'AT&T Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles,description)
- VALUES ('com.att','access','*','read',{'com.att.owner'},'AT&T Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles,description)
- VALUES ('com.att','access','*','*',{'com.att.admin'},'AT&T Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.att.owner','2020-12-31','com.att','owner');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.att.admin','2020-12-31','com.att','admin');
-
-// Create com.att.aaf
-
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('com.att.aaf',3,'Application Authorization Framework','com.att',3);
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('com.att.aaf','admin',{'com.att.aaf.access|*|*'},'AAF Admins');
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('com.att.aaf','owner',{'com.att.aaf.access|*|read'},'AAF Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('com.att.aaf','access','*','read',{'com.att.aaf.owner'},'AAF Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('com.att.aaf','access','*','*',{'com.att.aaf.admin'},'AAF Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.att.aaf.admin','2020-12-31','com.att.aaf','admin');
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','com.att.aaf.owner','2020-12-31','com.att.aaf','owner');
-
-
-// Create org.openecomp
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('org.openecomp',2,'Open EComp NS','com.att',2);
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('org.openecomp','admin',{'org.openecomp.access|*|*'},'OpenEcomp Admins');
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('org.openecomp','owner',{'org.openecomp.access|*|read'},'OpenEcomp Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org.openecomp','access','*','read',{'org.openecomp.owner'},'OpenEcomp Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org.openecomp','access','*','*',{'org.openecomp.admin'},'OpenEcomp Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','org.openecomp.admin','2020-12-31','org.openecomp','admin');
-
-// Create org.openecomp.dmaapBC
-
-INSERT INTO ns (name,scope,description,parent,type)
- VALUES('org.openecomp.dmaapBC',3,'Application Authorization Framework','org.openecomp',3);
-
-//INSERT INTO role(ns, name, perms, description)
-// VALUES('org.openecomp.dmaapBC','admin',{'org.openecomp.dmaapBC.access|*|*'},'AAF Admins');
-
-INSERT INTO role(ns, name, perms, description)
-VALUES('org.openecomp.dmaapBC','admin',{'org.openecomp.dmaapBC.access|*|*','org.openecomp.dmaapBC.topicFactory|:org.openecomp.dmaapBC.topic:org.openecomp.dmaapBC|create','org.openecomp.dmaapBC.mr.topic|:topic.org.openecomp.dmaapBC.newtopic|sub','org.openecomp.dmaapBC.mr.topic|:topic.org.openecomp.dmaapBC.newtopic|pub'},'AAF Admins');
-
-//INSERT INTO role(ns, name, perms, description)
-//VALUES('org.openecomp.dmaapBC','admin',{'org.openecomp.dmaapBC.access|*|*','org.openecomp.dmaapBC.mr.topic|:topic.org.openecomp.dmaapBC.newtopic|sub'},'AAF Admins');
-
-//INSERT INTO role(ns, name, perms, description)
-//VALUES('org.openecomp.dmaapBC','admin',{'org.openecomp.dmaapBC.access|*|*','org.openecomp.dmaapBC.mr.topic|:topic.org.openecomp.dmaapBC.newtopic|pub'},'AAF Admins');
-
-
-
-INSERT INTO role(ns, name, perms, description)
- VALUES('org.openecomp.dmaapBC','owner',{'org.openecomp.dmaapBC.access|*|read'},'AAF Owners');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org.openecomp.dmaapBC','access','*','read',{'org.openecomp.dmaapBC.owner'},'AAF Read Access');
-
-INSERT INTO perm(ns, type, instance, action, roles, description)
- VALUES ('org.openecomp.dmaapBC','access','*','*',{'org.openecomp.dmaapBC.admin'},'AAF Write Access');
-
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','org.openecomp.dmaapBC.admin','2020-12-31','org.openecomp.dmaapBC','admin');
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('dgl@openecomp.org','org.openecomp.dmaapBC.owner','2020-12-31','org.openecomp.dmaapBC','owner');
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('m99751@dmaapBC.openecomp.org','org.openecomp.dmaapBC.admin','2020-12-31','org.openecomp.dmaapBC','admin');
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('m99751@dmaapBC.openecomp.org','org.openecomp.dmaapBC.owner','2020-12-31','org.openecomp.dmaapBC','owner');
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('m99501@dmaapBC.openecomp.org','org.openecomp.dmaapBC.admin','2020-12-31','org.openecomp.dmaapBC','admin');
-INSERT INTO user_role(user,role,expires,ns,rname)
- VALUES ('m99501@dmaapBC.openecomp.org','org.openecomp.dmaapBC.owner','2020-12-31','org.openecomp.dmaapBC','owner');
+++ /dev/null
-iowna|Ima D. Owner|Ima|Owner|314-123-2000|ima.d.owner@osaaf.com|e|
-mmanager|Mark D. Manager|Mark|Manager|314-123-1234|mark.d.manager@osaaf.com|e|iowna
-bdevl|Robert D. Developer|Bob|Developer|314-123-1235|bob.d.develper@osaaf.com|e|mmanager
-mmarket|Mary D. Marketer|Mary|Marketer|314-123-1236|mary.d.marketer@osaaf.com|e|mmanager
-ccontra|Clarice D. Contractor|Clarice|Contractor|314-123-1237|clarice.d.contractor@osaaf.com|c|mmanager
-iretired|Ira Lee M. Retired|Ira|Retired|314-123-1238|clarice.d.contractor@osaaf.com|n|mmanager
-osaaf|ID of AAF|||||a|bdevl
+++ /dev/null
-// For Developer Machine single instance
-//
-CREATE KEYSPACE authz
-WITH REPLICATION = {'class' : 'SimpleStrategy','replication_factor':1};
-//
-// From Ravi, 6-17-2014. User for DEVL->TEST
-//
-// CREATE KEYSPACE authz WITH replication = { 'class': 'NetworkTopologyStrategy', 'HYWRCA02': '2', 'BRHMALDC': '2' };
-//
-// PROD
-//
-// CREATE KEYSPACE authz WITH replication = {'class': 'NetworkTopologyStrategy','ALPSGACT': '2','STLSMORC': '2','BRHMALDC': '2' };
-//
-// create user authz with password '<AUTHZ PASSWORD>' superuser;
-// grant all on keyspace authz to authz;
-//
-// For TEST (aaf_test)
-// CREATE KEYSPACE authz WITH replication = { 'class': 'NetworkTopologyStrategy', 'BRHMALDC': '1' };
-//
-// DEVL
-// CREATE KEYSPACE authz WITH replication = {'class': 'NetworkTopologyStrategy','STLSMORC': '2' };
-//
-// TEST / PERF
-// CREATE KEYSPACE authz WITH replication = {'class': 'NetworkTopologyStrategy','STLSMORC': '3','KGMTNC20': '3' };
-//
-// IST
-// CREATE KEYSPACE authz WITH replication = {'class': 'NetworkTopologyStrategy','STLSMORC':'3',
-// 'DLLSTXCF':'3','KGMTNC20':'3','SFLDMIBB':'3','HYWRCA02':'3' };
-//
-// with 6 localized with ccm
-// CREATE KEYSPACE authz WITH replication = { 'class': 'NetworkTopologyStrategy', 'dc1': '2', 'dc2': '2' };
-//
-
-USE authz;
-
-//
-// CORE Table function
-//
-
-// Namespace - establish hierarchical authority to modify
-// Permissions and Roles
-// "scope" is flag to determine Policy. Typical important scope
-// is "company" (1)
-CREATE TABLE ns (
- name varchar,
- scope int, // deprecated 2.0.11
- description varchar,
- parent varchar,
- type int,
- PRIMARY KEY (name)
-);
-CREATE INDEX ns_parent on ns(parent);
-
-
-// Oct 2015, not performant. Made Owner and Attrib first class Roles,
-// April, 2015. Originally, the plan was to utilize Cassandra 2.1.2, however, other team's preferences were to remain at current levels.
-// Therefore, we are taking the separate table approach. (coder Jeremiah Rohwedder)
-// We had dropped this by making first class objects of Responsible (Owner) and Admin. We need this again to mark namespaces
-// as having certain tools, like SWM, etc.
-CREATE TABLE ns_attrib (
- ns varchar,
- key varchar,
- value varchar,
- PRIMARY KEY (ns,key)
-);
-create index ns_attrib_key on ns_attrib(key);
-
-// Will be cached
-CREATE TABLE role (
- ns varchar,
- name varchar,
- perms set<varchar>, // Use "Key" of "name|type|action"
- description varchar,
- PRIMARY KEY (ns,name)
-);
-CREATE INDEX role_name ON role(name);
-
-// Will be cached
-CREATE TABLE perm (
- ns varchar,
- type varchar,
- instance varchar,
- action varchar,
- roles set<varchar>, // Need to find Roles given Permissions
- description varchar,
- PRIMARY KEY (ns,type,instance,action)
-);
-
-// This table is used for Authorization
-CREATE TABLE user_role (
- user varchar,
- role varchar, // deprecated: change to ns/rname after 2.0.11
- ns varchar,
- rname varchar,
- expires timestamp,
- PRIMARY KEY(user,role)
- );
-CREATE INDEX user_role_ns ON user_role(ns);
-CREATE INDEX user_role_role ON user_role(role);
-
-// This table is used only for returning User Credential (MechID) Authentication
-CREATE TABLE cred (
- id varchar,
- type int,
- expires timestamp,
- ns varchar,
- other int,
- notes varchar,
- cred blob,
- prev blob,
- PRIMARY KEY (id,type,expires)
- );
-CREATE INDEX cred_ns ON cred(ns);
-
-// Certificate Cross Table
-// coordinated with CRED type 2
-CREATE TABLE cert (
- fingerprint blob,
- id varchar,
- x500 varchar,
- expires timestamp,
- PRIMARY KEY (fingerprint)
- );
-CREATE INDEX cert_id ON cert(id);
-CREATE INDEX cert_x500 ON cert(x500);
-
-CREATE TABLE notify (
- user text,
- type int,
- last timestamp,
- checksum int,
- PRIMARY KEY (user,type)
-);
-
-CREATE TABLE x509 (
- ca text,
- serial blob,
- id text,
- x500 text,
- x509 text,
- PRIMARY KEY (ca,serial)
-);
-
-
-CREATE INDEX x509_id ON x509 (id);
-CREATE INDEX x509_x500 ON x509 (x500);
-
-//
-// Deployment Artifact (for Certman)
-//
-CREATE TABLE artifact (
- mechid text,
- machine text,
- type Set<text>,
- sponsor text,
- ca text,
- dir text,
- appName text,
- os_user text,
- notify text,
- expires timestamp,
- renewDays int,
- PRIMARY KEY (mechid,machine)
-);
-CREATE INDEX artifact_machine ON artifact(machine);
-
-//
-// Non-Critical Table functions
-//
-// Table Info - for Caching
-CREATE TABLE cache (
- name varchar,
- seg int, // cache Segment
- touched timestamp,
- PRIMARY KEY(name,seg)
-);
-
-CREATE TABLE history (
- id timeuuid,
- yr_mon int,
- user varchar,
- action varchar,
- target varchar, // user, user_role,
- subject varchar, // field for searching main portion of target key
- memo varchar, //description of the action
- reconstruct blob, //serialized form of the target
- // detail Map<varchar, varchar>, // additional information
- PRIMARY KEY (id)
-);
-CREATE INDEX history_yr_mon ON history(yr_mon);
-CREATE INDEX history_user ON history(user);
-CREATE INDEX history_subject ON history(subject);
-
-//
-// A place to hold objects to be created at a future time.
-//
-CREATE TABLE future (
- id uuid, // uniquify
- target varchar, // Target Table
- memo varchar, // Description
- start timestamp, // When it should take effect
- expires timestamp, // When not longer valid
- construct blob, // How to construct this object (like History)
- PRIMARY KEY(id)
-);
-CREATE INDEX future_idx ON future(target);
-CREATE INDEX future_start_idx ON future(start);
-
-
-CREATE TABLE approval (
- id timeuuid, // unique Key
- ticket uuid, // Link to Future Record
- user varchar, // the user who needs to be approved
- approver varchar, // user approving
- type varchar, // approver types i.e. Supervisor, Owner
- status varchar, // approval status. pending, approved, denied
- memo varchar, // Text for Approval to know what's going on
- operation varchar, // List operation to perform
- PRIMARY KEY(id)
- );
-CREATE INDEX appr_approver_idx ON approval(approver);
-CREATE INDEX appr_user_idx ON approval(user);
-CREATE INDEX appr_ticket_idx ON approval(ticket);
-CREATE INDEX appr_status_idx ON approval(status);
-
-CREATE TABLE delegate (
- user varchar,
- delegate varchar,
- expires timestamp,
- PRIMARY KEY (user)
-);
-CREATE INDEX delg_delg_idx ON delegate(delegate);
-
-//
-// Used by authz-batch processes to ensure only 1 runs at a time
-//
-CREATE TABLE run_lock (
- class text,
- host text,
- start timestamp,
- PRIMARY KEY ((class))
-);
+++ /dev/null
-iowna|Ima D. Owner|Ima|Owner|314-123-2000|ima.d.owner@osaaf.com|e|
-mmanager|Mark D. Manager|Mark|Manager|314-123-1234|mark.d.manager@osaaf.com|e|iowna
-bdevl|Robert D. Developer|Bob|Developer|314-123-1235|bob.d.develper@osaaf.com|e|mmanager
-mmarket|Mary D. Marketer|Mary|Marketer|314-123-1236|mary.d.marketer@osaaf.com|e|mmanager
-ccontra|Clarice D. Contractor|Clarice|Contractor|314-123-1237|clarice.d.contractor@osaaf.com|c|mmanager
-iretired|Ira Lee M. Retired|Ira|Retired|314-123-1238|clarice.d.contractor@osaaf.com|n|mmanager
-osaaf|ID of AAF|||||a|bdevl
-m99751|ID of AAF|||||a|bdevl
-m99501|ID of AAF|||||a|bdevl
+++ /dev/null
-#-------------------------------------------------------------------------------\r
-# ============LICENSE_START====================================================\r
-# * org.onap.aaf\r
-# * ===========================================================================\r
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
-# * ===========================================================================\r
-# * Licensed under the Apache License, Version 2.0 (the "License");\r
-# * you may not use this file except in compliance with the License.\r
-# * You may obtain a copy of the License at\r
-# * \r
-# * http://www.apache.org/licenses/LICENSE-2.0\r
-# * \r
-# * Unless required by applicable law or agreed to in writing, software\r
-# * distributed under the License is distributed on an "AS IS" BASIS,\r
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
-# * See the License for the specific language governing permissions and\r
-# * limitations under the License.\r
-# * ============LICENSE_END====================================================\r
-# *\r
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
-# *\r
-#-------------------------------------------------------------------------------\r
-version: '2'\r
-services:\r
- aaf_container:\r
- image: attos/aaf\r
- ports:\r
- - "8101:8101"\r
-\r
- links:\r
- - cassandra_container\r
- volumes:\r
- # - ./authAPI.props:/opt/app/aaf/authz-service/2.0.15/etc/authAPI.props\r
- - ./wait_for_host_port.sh:/tmp/wait_for_host_port.sh\r
- - ./data2:/data\r
- # - ./runaafcli.sh:/opt/app/aaf/authz-service/2.0.15/runaafcli.sh\r
- # - ./com.osaaf.common.props:/opt/app/aaf/authz-service/2.0.15/etc/com.osaaf.common.props\r
- # - ./cadi-core-1.3.0.jar:/opt/app/aaf/authz-service/2.0.15/lib/cadi-core-1.3.0.jar\r
- # - ./cadi-aaf-1.3.0.jar:/opt/app/aaf/authz-service/2.0.15/lib/cadi-aaf-1.3.0.jar\r
- # - ./cadi-client-1.3.0.jar:/opt/app/aaf/authz-service/2.0.15/lib/cadi-client-1.3.0.jar\r
- # - ./authz-service-2.0.15.jar:/opt/app/aaf/authz-service/2.0.15/lib/authz-service-2.0.15.jar\r
- # - ./dme2-3.1.200.jar:/opt/app/aaf/authz-service/2.0.15/lib/dme2-3.1.200.jar\r
- entrypoint: ["bash", "-c", "/tmp/wait_for_host_port.sh cassandra_container 9042; sleep 20; /bin/sh -c ./startup.sh"]\r
- environment:\r
- - CASSANDRA_CLUSTER=cassandra_container\r
- \r
-\r
- cassandra_container:\r
- image: cassandra:2.1.16\r
- ports:\r
- - "7000:7000"\r
- - "7001:7001"\r
- - "9042:9042"\r
- - "9160:9160"\r
- volumes:\r
- - ./data:/data\r
- - ./wait_for_host_port.sh:/tmp/wait_for_host_port.sh\r
- entrypoint: ["bash", "-c", "(/tmp/wait_for_host_port.sh localhost 9042 cqlsh --file /data/init.cql -u cassandra -p cassandra localhost; cqlsh --file /data/ecomp.cql -u cassandra -p cassandra localhost) & (/docker-entrypoint.sh cassandra -f)"]\r
+++ /dev/null
-# lji: this startup file shadows the existing entry point startup.sh file of the container
-# because we need to pass in the cassandra cluster location
-
-LIB=/opt/app/aaf/authz-service/lib
-
-ETC=/opt/app/aaf/authz-service/etc
-DME2REG=/opt/dme2reg
-
-echo "this is LIB" $LIB
-echo "this is ETC" $ETC
-echo "this is DME2REG" $DME2REG
-
-CLASSPATH=$ETC
-for FILE in `find $LIB -name *.jar`; do
- CLASSPATH=$CLASSPATH:$FILE
-done
-
-FILEPATHS="/opt/app/aaf/authz-service/etc/com.osaaf.common.props /opt/app/aaf/authz-service/etc/com.osaaf.common.props"
-for FILEPATH in $FILEPATHS:
-do
- if [ -e ${FILEPATH} ]; then
- if [ -z `grep "cassandra.clusters=$CASSANDRA_CLUSTER" $FILEPATH` ]; then
- echo "cassandra.clusters=$CASSANDRA_CLUSTER" >> $FILEPATH;
- fi
- fi
-done
-
-
-java -classpath $CLASSPATH -DDME2_EP_REGISTRY_CLASS=DME2FS -DAFT_DME2_EP_REGISTRY_FS_DIR=$DME2REG org.onap.aaf.authz.service.AuthAPI
-
-# keep it running so we can check fs
-while sleep 2; do echo thinking; done
-
-
+++ /dev/null
-net.ipv6.conf.all.disable_ipv6=1
-net.ipv6.conf.default.disable_ipv6=1
-net.ipv6.conf.lo.disable_ipv6=1
+++ /dev/null
-#!/bin/bash
-
-set -e
-
-host="$1"
-port="$2"
-shift
-shift
-cmd="$@"
-
-until echo > /dev/tcp/${host}/${port} ; do
- >&2 echo "${host}:${port} is unavailable - sleeping"
- sleep 1
-done
-
->&2 echo "${host}:${port} is up - executing command"
-exec $cmd
+++ /dev/null
-FROM openjdk:8-jdk \r
-ADD opt /opt/\r
-ADD authz-service.jar /opt/app/aaf/authz-service/lib/authz-service.jar\r
-ADD startup.sh /startup.sh\r
-RUN chmod 777 /startup.sh\r
-RUN chmod -R 777 /opt/app/aaf/authz-service/etc\r
-ENTRYPOINT ./startup.sh\r
-\r
-\r
+++ /dev/null
-##\r
-## AUTHZ API (authz-service) Properties\r
-##\r
-#hostname=localhost\r
-hostname=0.0.0.0\r
-# Standard AFT for THIS box, and THIS box is in St Louis. Put your own LAT/LONG in here. Use "bing.com/maps" or \r
-# SWMTools (geoloc for DataCenters) to get YOURs\r
-\r
-AFT_LATITUDE=32.780140\r
-AFT_LONGITUDE=-96.800451\r
-AFT_ENVIRONMENT=AFTUAT\r
-DEPLOYED_VERSION=2.0.SAMPLE\r
-\r
-##DME2 related parameters\r
-DMEServiceName=service=org.onap.aaf.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE\r
-\r
-#DME2 can limit Port Ranges with the following:\r
-AFT_DME2_PORT_RANGE=8101-8101,8100\r
-#DME2 picks any unused port in +1024 range\r
-#AFT_DME2_PORT=0\r
-AFT_DME2_ALLOW_PORT_CACHING=false\r
-\r
-\r
-# Point to "Common" files, used between all the AAF Services. ... \r
-\r
-\r
-\r
-#cadi_prop_files=com.osaaf.common.props;com.osaaf.props\r
-cadi_prop_files=opt/app/aaf/authz-service/etc/com.osaaf.common.props:opt/app/aaf/authz-service/etc/com.osaaf.props\r
-CACHE_HIGH_COUNT=40000\r
-CACHE_CLEAN_INTERVAL=60000\r
-\r
-\r
-\r
-\r
+++ /dev/null
-############################################################
-# Properties Written by Jonathan Gathman
-# on 2016-08-12T04:17:59.628-0500
-# These properties encapsulate the Verisign Public Certificates
-############################################################
-# DEVELOPER ONLY SETTING!!!!! DO NOT USE on ANY BOX other than your Developer box, and it
-# would be better if you got a Cert for that, and remove this! There is nothing stupider than
-# an unsecured Security Service.
-cadi_trust_all_x509=true
-
-# Public (i.e. Verisign) Key stores.
-# AFT_DME2_KEYSTORE=
-# AFT_DME2_KEYSTORE_PASSWORD=
-# AFT_DME2_KEY_PASSWORD=
-# cadi_truststore=
-# cadi_truststore_password=
-
-# Standard for this App/Machine
-aaf_env=DEV
-aaf_data_dir=opt/app/aaf/authz-service/etc/data
-cadi_loglevel=WARN
-aaf_id=<osaaf's Application Identity>
-aaf_password=enc:31-LFPNtP9Yl1DZKAz1rx8N8YfYVY8VKnnDr
-
-aaf_conn_timeout=6000
-aaf_timeout=10000
-aaf_user_expires=600000
-aaf_clean_interval=45000
-aaf_refresh_trigger_count=3
-aaf_high_count=30000
-
-# Basic Auth
-aaf_default_realm=openecomp.org
-#aaf_domain_support=.org
-basic_realm=openecomp.org
-basic_warn=false
-aaf_root_ns=org.openecomp
-localhost_deny=false
-
-
-# Cassandra
-# IP:Cass DataCenter:Latitude:Longitude,IP....
-cassandra.clusters=127.0.0.1
-cassandra.clusters.port=9042
-cassandra.clusters.user=authz
-cassandra.clusters.password=authz
-## Exceptions from Cassandra which require resetting the Cassandra Connections
-cassandra.reset.exceptions=com.datastax.driver.core.exceptions.NoHostAvailableException:"no host was tried":"Connection has been closed"
-
-# Consistency Settings
-cassandra.writeConsistency.ns=LOCAL_QUORUM
-cassandra.writeConsistency.perm=LOCAL_QUORUM
-cassandra.writeConsistency.role=LOCAL_QUORUM
-cassandra.writeConsistency.user_role=LOCAL_QUORUM
-cassandra.writeConsistency.cred=LOCAL_QUORUM
-cassandra.writeConsistency.ns_attrib=LOCAL_QUORUM
-
-## Supported Plugin Organizational Units
-Organization.org=org.onap.aaf.osaaf.defOrg.DefaultOrg
-
-## Email Server settings for Def Organization.
-#Sender's email ID needs to be mentioned
-com.osaaf.mailFromUserId=mailid@bogus.com
-com.osaaf.supportEmail=support@bogus.com
-com.osaaf.mailHost=smtp.bogus.com
-
-# Standard AAF DME2 Props
-AFT_DME2_REMOVE_PERSISTENT_CACHE_ON_STARTUP=TRUE
-AFT_DME2_DISABLE_PERSISTENT_CACHE=TRUE
-AFT_DME2_DISABLE_PERSISTENT_CACHE_LOAD=TRUE
-
-## SSL OPTIONAL ONLY IN DEVELOPMENT PC/Local... WHATEVER YOU DO, don't use this on any box than your local PC
-AFT_DME2_SSL_ENABLE=false
-# for when you turn on SSL... Only TLSv1.1+ is secure as of 2016
-AFT_DME2_SSL_WANT_CLIENT_AUTH=TRUE
-AFT_DME2_SSL_INCLUDE_PROTOCOLS=TLSv1.1,TLSv1.2
-AFT_DME2_SSL_VALIDATE_CERTS=FALSE
-AFT_DME2_CLIENT_IGNORE_SSL_CONFIG=false
-
-## Extra CA Trusts, for Certificate Manager to build truststore with external CAs
-cm_trust_cas=VerisignG3_CA.cer;VerisignG4_CA.cer;VerisignG5_CA.cer
+++ /dev/null
-############################################################
-# Initial File for Generating
-# on 2016-10-26T06:56:19.905-0500
-# @copyright 2016, AT&T
-############################################################
-cm_url=https://<certificate manager host>:8150
-hostname=localhost
-cadi_x509_issuers=CN=ATT CADI Issuing CA - Test 01, OU=CSO, O=ATT, C=US
-#cadi_keyfile=keyfile
+++ /dev/null
-# lji: this startup file shadows the existing entry point startup.sh file of the container
-# because we need to pass in the cassandra cluster location
-
-LIB=/opt/app/aaf/authz-service/lib
-
-ETC=/opt/app/aaf/authz-service/etc
-DME2REG=/opt/dme2reg
-
-echo "this is LIB" $LIB
-echo "this is ETC" $ETC
-echo "this is DME2REG" $DME2REG
-
-CLASSPATH=$ETC
-for FILE in `find $LIB -name *.jar`; do
- CLASSPATH=$CLASSPATH:$FILE
-done
-
-FILEPATHS="/opt/app/aaf/authz-service/etc/com.osaaf.common.props /opt/app/aaf/authz-service/etc/com.osaaf.common.props"
-for FILEPATH in $FILEPATHS:
-do
- if [ -e ${FILEPATH} ]; then
- if [ -z `grep "cassandra.clusters=$CASSANDRA_CLUSTER" $FILEPATH` ]; then
- echo "cassandra.clusters=$CASSANDRA_CLUSTER" >> $FILEPATH;
- fi
- fi
-done
-
-
-java -classpath $CLASSPATH -DDME2_EP_REGISTRY_CLASS=DME2FS -DAFT_DME2_EP_REGISTRY_FS_DIR=$DME2REG org.onap.aaf.authz.service.AuthAPI
-
-# keep it running so we can check fs
-while sleep 2; do echo thinking; done
-
-
+++ /dev/null
-##\r
-## AUTHZ API (authz-service) Properties\r
-##\r
-#hostname=localhost\r
-hostname=0.0.0.0\r
-# Standard AFT for THIS box, and THIS box is in St Louis. Put your own LAT/LONG in here. Use "bing.com/maps" or \r
-# SWMTools (geoloc for DataCenters) to get YOURs\r
-\r
-AFT_LATITUDE=32.780140\r
-AFT_LONGITUDE=-96.800451\r
-AFT_ENVIRONMENT=AFTUAT\r
-DEPLOYED_VERSION=2.0.SAMPLE\r
-\r
-##DME2 related parameters\r
-DMEServiceName=service=org.onap.aaf.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE\r
-\r
-#DME2 can limit Port Ranges with the following:\r
-AFT_DME2_PORT_RANGE=8101-8101,8100\r
-#DME2 picks any unused port in +1024 range\r
-#AFT_DME2_PORT=0\r
-AFT_DME2_ALLOW_PORT_CACHING=false\r
-\r
-\r
-# Point to "Common" files, used between all the AAF Services. ... \r
-\r
-\r
-\r
-#cadi_prop_files=com.osaaf.common.props;com.osaaf.props\r
-cadi_prop_files=opt/app/aaf/authz-service/etc/com.osaaf.common.props:opt/app/aaf/authz-service/etc/com.osaaf.props\r
-CACHE_HIGH_COUNT=40000\r
-CACHE_CLEAN_INTERVAL=60000\r
-\r
-\r
-\r
-\r
+++ /dev/null
-############################################################
-# Properties Written by Jonathan Gathman
-# on 2016-08-12T04:17:59.628-0500
-# These properties encapsulate the Verisign Public Certificates
-############################################################
-# DEVELOPER ONLY SETTING!!!!! DO NOT USE on ANY BOX other than your Developer box, and it
-# would be better if you got a Cert for that, and remove this! There is nothing stupider than
-# an unsecured Security Service.
-cadi_trust_all_x509=true
-
-# Public (i.e. Verisign) Key stores.
-# AFT_DME2_KEYSTORE=
-# AFT_DME2_KEYSTORE_PASSWORD=
-# AFT_DME2_KEY_PASSWORD=
-# cadi_truststore=
-# cadi_truststore_password=
-
-# Standard for this App/Machine
-aaf_env=DEV
-aaf_data_dir=opt/app/aaf/authz-service/etc/data
-cadi_loglevel=WARN
-aaf_id=<osaaf's Application Identity>
-aaf_password=enc:31-LFPNtP9Yl1DZKAz1rx8N8YfYVY8VKnnDr
-
-aaf_conn_timeout=6000
-aaf_timeout=10000
-aaf_user_expires=600000
-aaf_clean_interval=45000
-aaf_refresh_trigger_count=3
-aaf_high_count=30000
-
-# Basic Auth
-aaf_default_realm=openecomp.org
-#aaf_domain_support=.org
-basic_realm=openecomp.org
-basic_warn=false
-aaf_root_ns=org.openecomp
-localhost_deny=false
-
-
-# Cassandra
-# IP:Cass DataCenter:Latitude:Longitude,IP....
-cassandra.clusters=127.0.0.1
-cassandra.clusters.port=9042
-cassandra.clusters.user=authz
-cassandra.clusters.password=authz
-## Exceptions from Cassandra which require resetting the Cassandra Connections
-cassandra.reset.exceptions=com.datastax.driver.core.exceptions.NoHostAvailableException:"no host was tried":"Connection has been closed"
-
-# Consistency Settings
-cassandra.writeConsistency.ns=LOCAL_QUORUM
-cassandra.writeConsistency.perm=LOCAL_QUORUM
-cassandra.writeConsistency.role=LOCAL_QUORUM
-cassandra.writeConsistency.user_role=LOCAL_QUORUM
-cassandra.writeConsistency.cred=LOCAL_QUORUM
-cassandra.writeConsistency.ns_attrib=LOCAL_QUORUM
-
-## Supported Plugin Organizational Units
-Organization.org=org.onap.aaf.osaaf.defOrg.DefaultOrg
-
-## Email Server settings for Def Organization.
-#Sender's email ID needs to be mentioned
-com.osaaf.mailFromUserId=mailid@bogus.com
-com.osaaf.supportEmail=support@bogus.com
-com.osaaf.mailHost=smtp.bogus.com
-
-# Standard AAF DME2 Props
-AFT_DME2_REMOVE_PERSISTENT_CACHE_ON_STARTUP=TRUE
-AFT_DME2_DISABLE_PERSISTENT_CACHE=TRUE
-AFT_DME2_DISABLE_PERSISTENT_CACHE_LOAD=TRUE
-
-## SSL OPTIONAL ONLY IN DEVELOPMENT PC/Local... WHATEVER YOU DO, don't use this on any box than your local PC
-AFT_DME2_SSL_ENABLE=false
-# for when you turn on SSL... Only TLSv1.1+ is secure as of 2016
-AFT_DME2_SSL_WANT_CLIENT_AUTH=TRUE
-AFT_DME2_SSL_INCLUDE_PROTOCOLS=TLSv1.1,TLSv1.2
-AFT_DME2_SSL_VALIDATE_CERTS=FALSE
-AFT_DME2_CLIENT_IGNORE_SSL_CONFIG=false
-
-## Extra CA Trusts, for Certificate Manager to build truststore with external CAs
-cm_trust_cas=VerisignG3_CA.cer;VerisignG4_CA.cer;VerisignG5_CA.cer
+++ /dev/null
-############################################################
-# Initial File for Generating
-# on 2016-10-26T06:56:19.905-0500
-# @copyright 2016, AT&T
-############################################################
-cm_url=https://<certificate manager host>:8150
-hostname=localhost
-cadi_x509_issuers=CN=ATT CADI Issuing CA - Test 01, OU=CSO, O=ATT, C=US
-#cadi_keyfile=keyfile
+++ /dev/null
-##
-## AUTHZ API (authz-service) Properties
-##
-
-# Standard AFT for THIS box, and THIS box is in St Louis. Put your own LAT/LONG in here. Use "bing.com/maps" or
-# SWMTools (geoloc for DataCenters) to get YOURs
-
-AFT_LATITUDE=32.780140
-AFT_LONGITUDE=-96.800451
-AFT_ENVIRONMENT=AFTUAT
-DEPLOYED_VERSION=2.0.SAMPLE
-
-##DME2 related parameters
-DMEServiceName=service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE
-
-#DME2 can limit Port Ranges with the following:
-# AFT_DME2_PORT_RANGE=8101-8029,8100
-# Leaving both unset makes DME2 pick any unused port in +1024 range (Ephemeral)
-# AFT_DME2_PORT=0
-AFT_DME2_ALLOW_PORT_CACHING=false
-
-# Point to "Common" files, used between all the AAF Services. ...
-cadi_prop_files=../opt/app/aaf/common/com.osaaf.common.props;../opt/app/aaf/common/com.osaaf.props
-
-CACHE_HIGH_COUNT=40000
-CACHE_CLEAN_INTERVAL=60000
-
-
-
-
+++ /dev/null
-#-------------------------------------------------------------------------------\r
-# ============LICENSE_START====================================================\r
-# * org.onap.aaf\r
-# * ===========================================================================\r
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
-# * ===========================================================================\r
-# * Licensed under the Apache License, Version 2.0 (the "License");\r
-# * you may not use this file except in compliance with the License.\r
-# * You may obtain a copy of the License at\r
-# * \r
-# * http://www.apache.org/licenses/LICENSE-2.0\r
-# * \r
-# * Unless required by applicable law or agreed to in writing, software\r
-# * distributed under the License is distributed on an "AS IS" BASIS,\r
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
-# * See the License for the specific language governing permissions and\r
-# * limitations under the License.\r
-# * ============LICENSE_END====================================================\r
-# *\r
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
-# *\r
-#-------------------------------------------------------------------------------\r
-#\r
-# Licensed to the Apache Software Foundation (ASF) under one\r
-# or more contributor license agreements. See the NOTICE file\r
-# distributed with this work for additional information\r
-# regarding copyright ownership. The ASF licenses this file\r
-# to you under the Apache License, Version 2.0 (the\r
-# "License"); you may not use this file except in compliance\r
-# with the License. You may obtain a copy of the License at\r
-#\r
-# http://www.apache.org/licenses/LICENSE-2.0\r
-#\r
-# Unless required by applicable law or agreed to in writing,\r
-# software distributed under the License is distributed on an\r
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r
-# KIND, either express or implied. See the License for the\r
-# specific language governing permissions and limitations\r
-# under the License.\r
-#\r
-log4j.appender.INIT=org.apache.log4j.DailyRollingFileAppender\r
-log4j.appender.INIT.File=logs/${LOG4J_FILENAME_init}\r
-log4j.appender.INIT.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.INIT.MaxFileSize=10000KB\r
-#log4j.appender.INIT.MaxBackupIndex=7\r
-log4j.appender.INIT.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.INIT.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %m %n\r
-\r
-log4j.appender.SRVR=org.apache.log4j.DailyRollingFileAppender\r
-log4j.appender.SRVR.File=logs/${LOG4J_FILENAME_authz}\r
-log4j.appender.SRVR.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.SRVR.MaxFileSize=10000KB\r
-#log4j.appender.SRVR.MaxBackupIndex=7\r
-log4j.appender.SRVR.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.SRVR.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %p [%c] %m %n\r
-\r
-log4j.appender.AUDIT=org.apache.log4j.DailyRollingFileAppender\r
-log4j.appender.AUDIT.File=logs/${LOG4J_FILENAME_audit}\r
-log4j.appender.AUDIT.DatePattern='.'yyyy-MM-dd\r
-#log4j.appender.AUDIT.MaxFileSize=10000KB\r
-#log4j.appender.AUDIT.MaxBackupIndex=7\r
-log4j.appender.AUDIT.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.AUDIT.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %m %n\r
-\r
-log4j.appender.TRACE=org.apache.log4j.DailyRollingFileAppender\r
-log4j.appender.TRACE.File=logs/${LOG4J_FILENAME_trace}\r
-log4j.appender.TRACE.DatePattern='.'yyyy-MM-dd\r
-log4j.appender.TRACE.MaxFileSize=10000KB\r
-log4j.appender.TRACE.MaxBackupIndex=7\r
-log4j.appender.TRACE.layout=org.apache.log4j.PatternLayout \r
-log4j.appender.TRACE.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %m %n\r
-\r
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender\r
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSSZ} %p [%c] %m %n\r
-\r
-# General Apache libraries\r
-log4j.rootLogger=WARN\r
-log4j.logger.org.apache=WARN,INIT\r
-log4j.logger.dme2=WARN,INIT\r
-log4j.logger.init=WARN,stdout,INIT\r
-log4j.logger.authz=WARN,stdout,SRVR\r
-log4j.logger.audit=WARN,AUDIT\r
-log4j.logger.trace=TRACE,TRACE\r
-\r
+++ /dev/null
-#!/bin/sh\r
-##############################################################################\r
-# - Copyright 2012, 2016 AT&T Intellectual Properties\r
-##############################################################################
-umask 022\r
-ROOT_DIR=${INSTALL_ROOT}/${distFilesRootDirPath}\r
-\r
-# Grab the IID of all resources running under the name and same version(s) we're working on and stop those instances\r
-${LRM_HOME}/bin/lrmcli -running | \\r
- grep ${artifactId} | \\r
- grep ${version} | \\r
- cut -f1 | \\r
-while read _iid\r
-do\r
- if [ -n "${_iid}" ]; then\r
- ${LRM_HOME}/bin/lrmcli -shutdown -iid ${_iid} | grep SUCCESS\r
- if [ $? -ne 0 ]; then\r
- echo "$LRMID-{_iid} Shutdown failed"\r
- fi\r
- fi\r
-done\r
- \r
-# Grab the resources configured under the name and same version we're working on and delete those instances\r
-${LRM_HOME}/bin/lrmcli -configured | \\r
- grep ${artifactId} | \\r
- grep ${version} | \\r
- cut -f1,2,3 | \\r
-while read _name _version _routeoffer\r
-do\r
- if [ -n "${_name}" ]; then\r
- ${LRM_HOME}/bin/lrmcli -delete -name ${_name} -version ${_version} -routeoffer ${_routeoffer} | grep SUCCESS\r
- if [ $? -ne 0 ]; then\r
- echo "${_version} Delete failed"\r
- fi\r
- fi\r
-done \r
-\r
-rm -rf ${ROOT_DIR}\r
-\r
-exit 0\r
+++ /dev/null
-#!/bin/sh
-##############################################################################
-# AAF Installs
-# - Copyright 2015, 2016 AT&T Intellectual Properties
-##############################################################################
-umask 022
-ROOT_DIR=${INSTALL_ROOT}${distFilesRootDirPath}
-COMMON_DIR=${INSTALL_ROOT}${distFilesRootDirPath}/../../common
-LRM_XML=${ROOT_DIR}/etc/lrm-${artifactId}.xml
-LOGGING_PROP_FILE=${ROOT_DIR}/etc/log4j.properties
-LOGGER_PROP_FILE=${ROOT_DIR}/etc/logging.props
-AAFLOGIN=${ROOT_DIR}/bin/aaflogin
-JAVA_HOME=/opt/java/jdk/jdk180
-JAVA=$JAVA_HOME/bin/java
-CADI_JAR=`ls $ROOT_DIR/lib/cadi-core*.jar`
-
-cd ${ROOT_DIR}
-
-mkdir -p logs || fail 1 "Error on creating the logs directory."
-mkdir -p back || fail 1 "Error on creating the back directory."
-chmod 777 back || fail 1 "Error on creating the back directory."
-
-#
-# Some Functions that Vastly cleanup this install file...
-# You wouldn't believe how ugly it was before. Unreadable... JG
-#
-fail() {
- rc=$1
- shift;
- echo "ERROR: $@"
- exit $rc
-}
-
-#
-# Set the "SED" replacement for this Variable. Error if missing
-# Note that Variable in the Template is surrounded by "_" i.e. _ROOT_DIR_
-# Replacement Name
-# Value
-#
-required() {
- if [ -z "$2" ]; then
- ERRS+="\n\t$1 must be set for this installation"
- fi
- SED_E+=" -e s|$1|$2|g"
-}
-
-#
-# Set the "SED" replacement for this Variable. Use Default (3rd parm) if missing
-# Note that Variable in the Template is surrounded by "_" i.e. _ROOT_DIR_
-# Replacement Name
-# Value
-# Default Value
-#
-default() {
- if [ -z "$2" ]; then
- SED_E+=" -e s|$1|$3|g"
- else
- SED_E+=" -e s|$1|$2|g"
- fi
-}
-
-#
-# Password behavior:
-# For each Password passed in:
-# If Password starts with "enc:???", then replace it as is
-# If not, then check for CADI_KEYFILE... see next
-# If the CADI_KEYFILE is set, the utilize this as the CADI Keyfile
-# If it does not exist, create it, and change to "0400" mode
-# Utilize the Java and "cadi-core" found in Library to
-# Encrypt Password with Keyfile, prepending "enc:???"
-#
-passwd() {
- #
- # Test if var exists, and is required
- #
- if [ "${!1}" = "" ]; then
- if [ "${2}" = "required" ]; then
- ERRS+="\n\t$1 must be set for this installation"
- fi
- else
- #
- # Test if needs encrypting
- #
- if [[ ${!1} = enc:* ]]; then
- SED_E+=" -e s|_${1}_|${!1}|g"
- else
- if [ "${CADI_KEYFILE}" != "" ] && [ -e "${CADI_JAR}" ]; then
- #
- # Create or use Keyfile listed in CADI_KEYFILE
- #
- if [ -e "${CADI_KEYFILE}" ]; then
- if [ "$REPORTED_CADI_KEYFILE" = "" ]; then
- echo "Using existing CADI KEYFILE (${CADI_KEYFILE})"
- REPORTED_CADI_KEYFILE=true
- fi
- else
- echo "Creating CADI_KEYFILE (${CADI_KEYFILE})"
- $JAVA -jar $CADI_JAR keygen ${CADI_KEYFILE}
- chmod 0400 ${CADI_KEYFILE}
- fi
-
- PASS=`$JAVA -jar $CADI_JAR digest ${!1} ${CADI_KEYFILE}`
- SED_E+=" -e s|_${1}_|enc:$PASS|g"
- else
- if [ "$REPORTED_CADI_KEYFILE" = "" ]; then
- if [ "${CADI_KEYFILE}" = "" ]; then
- ERRS+="\n\tCADI_KEYFILE must be set for this installation"
- fi
- if [ ! -e "${CADI_JAR}" ]; then
- ERRS+="\n\t${CADI_JAR} must exist to deploy passwords"
- fi
- REPORTED_CADI_KEYFILE=true
- fi
- fi
- fi
- fi
-}
-
-# Linux requires this. Mac blows with it. Who knows if Windoze even does SED
-if [ -z "$SED_OPTS" ]; then
- SED_E+=" -c "
-else
- SED_E+=$SED_OPTS;
-fi
-
-#
-# Use "default" function if there is a property that isn't required, but can be defaulted
-# use "required" function if the property must be set by the environment
-#
- required _ROOT_DIR_ ${ROOT_DIR}
- default _COMMON_DIR_ ${AUTHZ_COMMON_DIR} ${COMMON_DIR}
- required _JAVA_HOME_ ${JAVA_HOME}
- required _SCLD_PLATFORM_ ${SCLD_PLATFORM}
- required _HOSTNAME_ ${TARGET_HOSTNAME_FQ}
- required _ARTIFACT_ID_ ${artifactId}
- default _ARTIFACT_VERSION_ ${AFTSWM_ACTION_NEW_VERSION}
- default _RESOURCE_REGISTRATION_ ${RESOURCE_REGISTRATION} true
- default _AUTHZ_DATA_DIR_ ${AUTHZ_DATA_DIR} ${ROOT_DIR}/../../data
- default _CM_URL_ ${CM_URL} ""
-
- # Specifics for Service
- if [ "${artifactId}" = "authz-service" ]; then
- PROPERTIES_FILE=${ROOT_DIR}/etc/authAPI.props
- default _RESOURCE_MIN_COUNT_ ${RESOURCE_MIN_COUNT} 1
- default _RESOURCE_MAX_COUNT_ ${RESOURCE_MAX_COUNT} 5
- required _AUTHZ_SERVICE_PORT_RANGE_ ${AUTHZ_SERVICE_PORT_RANGE}
-
- elif [ "${artifactId}" = "authz-gui" ]; then
- PROPERTIES_FILE=${ROOT_DIR}/etc/authGUI.props
- required _AUTHZ_GUI_PORT_RANGE_ ${AUTHZ_GUI_PORT_RANGE}
- default _RESOURCE_MIN_COUNT_ ${RESOURCE_MIN_COUNT} 1
- default _RESOURCE_MAX_COUNT_ ${RESOURCE_MAX_COUNT} 2
-
- elif [ "${artifactId}" = "authz-gw" ]; then
- PROPERTIES_FILE=${ROOT_DIR}/etc/authGW.props
- default _AUTHZ_GW_PORT_RANGE_ ${AUTHZ_GW_PORT_RANGE} 8095-8095
- default _RESOURCE_MIN_COUNT_ 1
- default _RESOURCE_MAX_COUNT_ 1
-
- elif [ "${artifactId}" = "authz-fs" ]; then
- PROPERTIES_FILE=${ROOT_DIR}/etc/FileServer.props
- OTHER_FILES=${ROOT_DIR}/data/test.html
- default _AUTHZ_FS_PORT_RANGE_ ${AUTHZ_FS_PORT_RANGE} 8096-8096
- default _RESOURCE_MIN_COUNT_ 1
- default _RESOURCE_MAX_COUNT_ 1
-
- elif [ "${artifactId}" = "authz-certman" ]; then
- PROPERTIES_FILE=${ROOT_DIR}/etc/certman.props
- default _AUTHZ_CERTMAN_PORT_RANGE_ ${AUTHZ_CERTMAN_PORT_RANGE} 8150-8159
- default _RESOURCE_MIN_COUNT_ 1
- default _RESOURCE_MAX_COUNT_ 1
- elif [ "${artifactId}" = "authz-batch" ]; then
- PROPERTIES_FILE=${ROOT_DIR}/etc/authBatch.props
- cd /
- OTHER_FILES=`find ${ROOT_DIR}/bin -depth -type f`
- cd -
- default _RESOURCE_MIN_COUNT_ 1
- default _RESOURCE_MAX_COUNT_ 1
- required _AUTHZ_GUI_URL_ ${AUTHZ_GUI_URL}
- else
- PROPERTIES_FILE=NONE
- fi
-
- if [ "${DME2_FS}" != "" ]; then
- SED_E+=" -e s|_DME2_FS_|-DDME2_EP_REGISTRY_CLASS=DME2FS\$\{AAF_SPACE\}-DAFT_DME2_EP_REGISTRY_FS_DIR=${DME2_FS}|g"
- else
- SED_E+=" -e s|_DME2_FS_||g"
- fi
-
-
- default _EMAIL_FROM_ ${EMAIL_FROM} authz@ems.att.com
- default _EMAIL_HOST_ ${EMAIL_HOST} mailhost.att.com
- default _ROUTE_OFFER_ ${ROUTE_OFFER} BAU_SE
- default _DME_TIMEOUT_ ${DME_TIMEOUT} 3000
-
- # Choose defaults for log level and logfile size
- if [ "${SCLD_PLATFORM}" = "PROD" ]; then
- LOG4J_LEVEL=WARN
- fi
-
- default _AFT_ENVIRONMENT_ ${AFT_ENVIRONMENT} AFTUAT
- default _ENV_CONTEXT_ ${ENV_CONTEXT} DEV
- default _LOG4J_LEVEL_ ${LOG4J_LEVEL} WARN
- default _LOG4J_SIZE_ ${LOG4J_SIZE} 10000KB
- default _LOG_DIR_ ${LOG_DIR} ${ROOT_DIR}/logs
- default _MAX_LOG_FILE_SIZE_ ${MAX_LOG_FILE_SIZE} 10000KB
- default _MAX_LOG_FILE_BACKUP_COUNT_ ${MAX_LOG_FILE_BACKUP_COUNT} 7
-
- if [ "${artifactId}" != "authz-batch" ]; then
- required _LRM_XML_ ${LRM_XML}
- fi
- required _AFT_LATITUDE_ ${LATITUDE}
- required _AFT_LONGITUDE_ ${LONGITUDE}
- required _HOSTNAME_ ${HOSTNAME}
-
- required _PROPERTIES_FILE_ ${PROPERTIES_FILE}
- required _LOGGING_PROP_FILE_ ${LOGGING_PROP_FILE}
-
- # Divide up Version
- default _MAJOR_VER_ "`expr ${version} : '\([0-9]*\)\..*'`"
- default _MINOR_VER_ "`expr ${version} : '[0-9]*\.\([0-9]*\)\..*'`"
- default _PATCH_VER_ "`expr ${version} : '[0-9]\.[0-9]*\.\(.*\)'`"
-
-# Now Fail if Required items are not set...
-# Report all of them at once!
-if [ "${ERRS}" != "" ] ; then
- fail 1 "${ERRS}"
-fi
-
-#echo ${SED_E}
-
-for i in ${PROPERTIES_FILE} ${LRM_XML} ${LOGGING_PROP_FILE} ${AAFLOGIN} ${OTHER_FILES} ; do
- if [ -r ${i} ]; then
- if [ -w ${i} ]; then
-# echo ${i}
- sed ${SED_E} -i'.sed' ${i} || fail 8 "could not sed ${i} "
- mv -f ${i}.sed ${ROOT_DIR}/back
- fi
- fi
-done
-
-#
-# Add the resource to LRM using the newly created/substituted XML file.
-#
-if [ -r ${LRM_XML} ]; then
- ${LRM_HOME}/bin/lrmcli -addOrUpgrade -file ${LRM_XML} || fail 1 "Add to LRM Failed"
- ${LRM_HOME}/bin/lrmcli -start -name com.att.authz.${artifactId} -version ${version} -routeoffer ${ROUTE_OFFER} | grep SUCCESS
-fi
-
-
-# Note: Must exit 0 or, it will be exit default 1 and fail
-exit 0
+++ /dev/null
-#!/bin/sh
-######################################################################
-# $RCSfile$ - $Revision$
-# Copyright 2012 AT&T Intellectual Property. All rights reserved.
-######################################################################
-#!/bin/sh
-exit 0
\ No newline at end of file
+++ /dev/null
-#!/bin/sh
-
-exec sh -x ../../common/deinstall.sh
+++ /dev/null
-<?xml version="1.0" encoding="utf-8"?>\r
-<!--\r
- ============LICENSE_START====================================================\r
- * org.onap.aaf\r
- * ===========================================================================\r
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * ===========================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- * \r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- * \r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END====================================================\r
- *\r
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- *\r
--->\r
-<descriptor version="1" xmlns="http://aft.att.com/swm/descriptor">\r
- <platforms>\r
- <platform architecture="*" os="*" osVersions="*"/> \r
- </platforms>\r
- <paths>\r
- <path name="/opt/app/aaf" type="d" user="aft" group="aft" permissions="0755" recursive="false"/>\r
- <path name="/opt/app/aaf/${artifactId}" type="d" user="aft" group="aft" permissions="0755" recursive="false"/>\r
- <path name="/opt/app/aaf/${artifactId}/${version}" type="d" user="aft" group="aft" permissions="0755" recursive="true"/>\r
- </paths>\r
- <actions>\r
- <action type="INIT">\r
- <proc stage="PRE" user="aft" group="aft"/>\r
- <proc stage="POST" user="aft" group="aft"/>\r
- </action>\r
- <action type="INST">\r
- <proc stage="PRE" user="aft" group="aft"/>\r
- <proc stage="POST" user="aft" group="aft"/>\r
- </action>\r
- <action type="DINST">\r
- <proc stage="PRE" user="aft" group="aft"/>\r
- <proc stage="POST" user="aft" group="aft"/>\r
- </action>\r
- <action type="FALL">\r
- <proc stage="PRE" user="aft" group="aft"/>\r
- <proc stage="POST" user="aft" group="aft"/>\r
- </action>\r
- </actions>\r
-</descriptor>\r
+++ /dev/null
-#!/bin/sh\r
-######################################################################\r
-# $RCSfile$ - $Revision$\r
-# Copyright 2012 AT&T Intellectual Property. All rights reserved.\r
-######################################################################\r
-exec sh -x ../../common/install.sh
\ No newline at end of file
+++ /dev/null
-#!/bin/sh
-######################################################################
-# $RCSfile$ - $Revision$
-# Copyright 2012 AT&T Intellectual Property. All rights reserved.
-######################################################################
-exit 0
\ No newline at end of file
+++ /dev/null
-#!/bin/sh
-######################################################################
-# $RCSfile$ - $Revision$
-# Copyright 2012 AT&T Intellectual Property. All rights reserved.
-######################################################################
-exec sh -x ../../common/install.sh
+++ /dev/null
-#!/bin/sh
-######################################################################
-# $RCSfile$ - $Revision$
-# Copyright 2012 AT&T Intellectual Property. All rights reserved.
-######################################################################
-#!/bin/sh
-exit 0
\ No newline at end of file
+++ /dev/null
-#!/bin/sh
-######################################################################
-# $RCSfile$ - $Revision$
-# Copyright 2012 AT&T Intellectual Property. All rights reserved.
-######################################################################
-
-exec sh -x ../../common/install.sh
+++ /dev/null
-#!/bin/sh
-######################################################################
-# $RCSfile$ - $Revision$
-# Copyright 2012 AT&T Intellectual Property. All rights reserved.
-######################################################################
-
-exit 0
+++ /dev/null
-#-------------------------------------------------------------------------------\r
-# ============LICENSE_START====================================================\r
-# * org.onap.aaf\r
-# * ===========================================================================\r
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
-# * ===========================================================================\r
-# * Licensed under the Apache License, Version 2.0 (the "License");\r
-# * you may not use this file except in compliance with the License.\r
-# * You may obtain a copy of the License at\r
-# * \r
-# * http://www.apache.org/licenses/LICENSE-2.0\r
-# * \r
-# * Unless required by applicable law or agreed to in writing, software\r
-# * distributed under the License is distributed on an "AS IS" BASIS,\r
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
-# * See the License for the specific language governing permissions and\r
-# * limitations under the License.\r
-# * ============LICENSE_END====================================================\r
-# *\r
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
-# *\r
-#-------------------------------------------------------------------------------\r
-The following two commands can be used to create and approve a SWM installation package.\r
-\r
-These steps assume:\r
- 1. The component has been added in SWM\r
- 2. The java6 directory resides, by itself, under the directory '${artifactId}-${version}'\r
- 3. The SWM client is executed from the same directory containing '${artifactId}-${version}'\r
-\r
-\r
- attuid@swmcli- --> component pkgcreate -c ${groupId}:${artifactId}:${version} -d ${artifactId}-${version}\r
- attuid@swmcli- --> component pkgapprove -c ${groupId}:${artifactId}:${version}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.cadi;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.security.Principal;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.cadi.DirectAAFLur;\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-\r
-import org.onap.aaf.cadi.Permission;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_DirectAAFLur {\r
- \r
-public static AuthzEnv env;\r
-public static Question question;\r
-public DirectAAFLur directAAFLur;\r
-\r
-\r
-\r
- @Before\r
- public void setUp()\r
- {\r
- directAAFLur = new DirectAAFLur(env, question); \r
- }\r
- \r
- @Test\r
- public void testFish()\r
- {\r
- \r
- Principal bait = null;\r
- Permission pond=null;\r
- directAAFLur.fish(bait, pond); \r
- \r
- assertTrue(true);\r
- \r
- }\r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.cadi;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-\r
-import org.onap.aaf.cadi.CredVal.Type;\r
-\r
-import static org.mockito.Mockito.*;\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.mockito.runners.MockitoJUnitRunner;\r
-import org.onap.aaf.authz.cadi.DirectAAFUserPass;\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-import org.powermock.core.classloader.annotations.PrepareForTest;\r
-import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-\r
-\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_DirectAAFUserPass {\r
- \r
-//public static AuthzEnv env;\r
-//public static Question question;\r
-public static String string;\r
-public DirectAAFUserPass directAAFUserPass;\r
-\r
-@Mock\r
-AuthzEnv env;\r
-Question question;\r
-String user;\r
-Type type; \r
-byte[] pass;\r
- @Before\r
- public void setUp() {\r
- directAAFUserPass = new DirectAAFUserPass(env, question, string);\r
- }\r
- \r
- @Test\r
- public void testvalidate(){\r
-\r
-// Boolean bolVal = directAAFUserPass.validate(user, type, pass);\r
- // assertEquals((bolVal==null),true);\r
-\r
- assertTrue(true);\r
- \r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.cadi;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.security.Principal;\r
-import java.security.cert.CertificateException;\r
-import java.security.cert.X509Certificate;\r
-\r
-import javax.servlet.http.HttpServletRequest;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.cadi.DirectCertIdentity;\r
-import org.onap.aaf.dao.aaf.cached.CachedCertDAO;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_DirectCertIdentity {\r
- \r
- public DirectCertIdentity directCertIdentity;\r
- \r
- @Before\r
- public void setUp(){\r
- directCertIdentity = new DirectCertIdentity();\r
- }\r
-\r
-\r
- @Mock\r
- HttpServletRequest req;\r
- X509Certificate cert;\r
- byte[] _certBytes;\r
- \r
- @Test\r
- public void testidentity(){\r
- \r
- try {\r
- Principal p = directCertIdentity.identity(req, cert, _certBytes);\r
- assertEquals(( (p) == null),true);\r
- \r
- } catch (CertificateException e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- //assertTrue(true);\r
- \r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.util.Properties;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.cadi.DirectAAFUserPass;\r
-import org.onap.aaf.authz.env.AuthzEnv;\r
-import org.onap.aaf.authz.facade.AuthzFacade_2_0;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.dao.aaf.hl.Question;\r
-\r
-public class JU_AuthAPI {\r
- \r
- public AuthAPI authAPI;\r
- AuthzEnv env;\r
- private static final String ORGANIZATION = "Organization.";\r
- private static final String DOMAIN = "openecomp.org";\r
-\r
- public Question question;\r
- private AuthzFacade_2_0 facade;\r
- private AuthzFacade_2_0 facade_XML;\r
- private DirectAAFUserPass directAAFUserPass;\r
- public Properties props;\r
- @Before\r
- public void setUp(){\r
- try {\r
- authAPI = new AuthAPI(env);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- }\r
- \r
- @Test\r
- public void testStartDME2(Properties props){\r
- try {\r
- authAPI.startDME2(props);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- \r
- //assertTrue(true);\r
- \r
- }\r
-\r
-\r
- \r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_Api;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_Api {\r
- API_Api api_Api;\r
- @Mock\r
- AuthAPI authzAPI;\r
- AuthzFacade facade;\r
- \r
- @Before\r
- public void setUp(){\r
- //api_Api = new API_Api();\r
- }\r
-\r
-\r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit()\r
- {\r
- try {\r
- api_Api.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- assertTrue(true);\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_Approval;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_Approval {\r
- API_Approval api_Approval;\r
- \r
- @Mock\r
- AuthAPI authzAPI;\r
- AuthzFacade facade;\r
- \r
- @Before\r
- public void setUp()\r
- {\r
- \r
- }\r
-\r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit() {\r
- \r
- try {\r
- api_Approval.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- //assertTrue(true);\r
- }\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.cadi.DirectAAFUserPass;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_Creds;\r
-\r
-import org.onap.aaf.inno.env.Env;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_Creds {\r
-\r
-API_Creds api_Creds;\r
-@Mock\r
-AuthAPI authzAPI;\r
-AuthzFacade facade;\r
-Env env;\r
-DirectAAFUserPass directAAFUserPass;\r
- @Before\r
- public void setUp(){\r
- \r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit(){ \r
- try {\r
- api_Creds.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- } \r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testTimeSensitiveInit(){\r
- \r
- try {\r
- api_Creds.timeSensitiveInit(env, authzAPI, facade, directAAFUserPass);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_Delegate;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_Delegate {\r
-API_Delegate api_Delegate;\r
-@Mock\r
-AuthAPI authzAPI;\r
-AuthzFacade facade;\r
- @Before\r
- public void setUp() {\r
- \r
- \r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit(){\r
- \r
- try {\r
- api_Delegate.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_History;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_History {\r
- API_History api_History;\r
- \r
- @Mock\r
- AuthAPI authzAPI;\r
- AuthzFacade facade;\r
- \r
- @Before\r
- public void setUp(){\r
- \r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit(){\r
- \r
- try {\r
- api_History.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- assertTrue(true);\r
- }\r
-\r
-\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_Mgmt;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_Mgmt {\r
- API_Mgmt api_Mgmt;\r
- \r
- @Mock\r
- AuthAPI authzAPI;\r
- AuthzFacade facade;\r
- \r
- @Before\r
- public void setUp(){\r
- \r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit(){\r
- \r
- try {\r
- api_Mgmt.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_NS;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_NS {\r
- API_NS api_Ns;\r
- @Mock\r
- AuthAPI authzAPI;\r
- AuthzFacade facade;\r
-\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit(){\r
- try {\r
- api_Ns.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- }\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_Perms;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_Perms {\r
- API_Perms api_Perms;\r
- @Mock\r
- AuthAPI authzAPI;\r
- AuthzFacade facade;\r
-\r
- @Before\r
- public void setUp(){\r
- \r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit(){\r
- try {\r
- api_Perms.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testTimeSensitiveInit(){\r
- try {\r
- api_Perms.timeSensitiveInit(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_Roles;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_Roles {\r
- API_Roles api_Roles;\r
- @Mock\r
- AuthAPI authzAPI;\r
- AuthzFacade facade;\r
- \r
-\r
- @Before\r
- public void setUp() {\r
- assertTrue(true);\r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit(){\r
- try {\r
- api_Roles.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- } }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_User;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_User {\r
- API_User api_User;\r
- @Mock\r
- AuthAPI authzAPI;\r
- AuthzFacade facade;\r
-\r
- @Before\r
- public void setUp() {\r
- //assertTrue(true);\r
- }\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit(){\r
- try {\r
- api_User.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.api;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.mockito.Mock;\r
-import org.onap.aaf.authz.facade.AuthzFacade;\r
-import org.onap.aaf.authz.service.AuthAPI;\r
-import org.onap.aaf.authz.service.api.API_UserRole;\r
-import org.powermock.modules.junit4.PowerMockRunner;\r
-@RunWith(PowerMockRunner.class)\r
-public class JU_API_UserRole {\r
- API_UserRole api_UserRole;\r
- @Mock\r
- AuthAPI authzAPI;\r
- AuthzFacade facade;\r
-\r
- \r
- @SuppressWarnings("static-access")\r
- @Test\r
- public void testInit(){\r
- try {\r
- api_UserRole.init(authzAPI, facade);\r
- } catch (Exception e) {\r
- // TODO Auto-generated catch block\r
- e.printStackTrace();\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.mapper;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import org.junit.Test;\r
-\r
-public class JU_Mapper_2_0 {\r
-\r
- @Test\r
- public void test() {\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testApprovals(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testCert(){\r
- assertTrue(true);\r
- \r
- }\r
- \r
- @Test\r
- public void testCred(){\r
- assertTrue(true);\r
- \r
- }\r
- \r
- @Test\r
- public void testDelegate(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testErrorFromMessage(){\r
- assertTrue(true);\r
- \r
- }\r
- \r
- @Test\r
- public void testFuture(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testGetClass(){\r
- assertTrue(true);\r
- }\r
-\r
- @Test\r
- public void testGetExpires(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testGetMarshal(){\r
- assertTrue(true);\r
- \r
- }\r
- \r
- @Test\r
- public void testHistory(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testKeys(){\r
- assertTrue(true);\r
- \r
- }\r
- \r
- @Test\r
- public void testNewInstance(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testNs(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testNss(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testPerm(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testPermFromRPRequest(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testPermKey(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testPerms(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testRole(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testRoleFromRPRequest(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testRoles(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testUserRole(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testUserRoles(){\r
- assertTrue(true);\r
- }\r
- \r
- @Test\r
- public void testUsers(){\r
- assertTrue(true);\r
- }\r
- \r
- \r
- \r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.test;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertFalse;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.service.validation.Validator;\r
-\r
-public class JU_Validator {\r
-\r
-\r
- @Test\r
- public void test() {\r
- assertTrue(Validator.ACTION_CHARS.matcher("HowdyDoody").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("Howd?yDoody").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("_HowdyDoody").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("HowdyDoody").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("Howd?yDoody").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("_HowdyDoody").matches());\r
-\r
- // \r
- assertTrue(Validator.ACTION_CHARS.matcher("*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":*:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":*:*").matches());\r
- \r
- assertFalse(Validator.ACTION_CHARS.matcher(":hello").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":hello").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("hello:").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("hello:d").matches());\r
-\r
- assertFalse(Validator.ACTION_CHARS.matcher(":hello:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":hello:*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":hello:d*:*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":hello:d*d:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":hello:d*:*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("HowdyDoody*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("Howdy*Doody").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("HowdyDoody*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("*HowdyDoody").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("*HowdyDoody").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":h*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":h*h*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":h*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":h:h*:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":h:h*:*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":h:h*h:*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":h:h*h*:*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":h:*:*h").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":h:*:*h").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":com.test.*:ns:*").matches());\r
-\r
- \r
- assertFalse(Validator.ACTION_CHARS.matcher("1234+235gd").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("1234-235gd").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("1234-23_5gd").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("1234-235g,d").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("1234-235gd(Version12)").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("123#4-23@5g:d").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("123#4-23@5g:d").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("1234-23 5gd").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("1234-235gd ").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(" 1234-235gd").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(" ").matches());\r
-\r
- // Allow % and = (Needed for Escaping & Base64 usages) jg \r
- assertTrue(Validator.ACTION_CHARS.matcher("1234%235g=d").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":1234%235g=d").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234%235g=d").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d:%20==").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d:==%20:=%23").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d:*:=%23").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d:==%20:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":*:==%20:*").matches());\r
-\r
- // Allow / instead of : (more natural instance expression) jg \r
- assertFalse(Validator.INST_CHARS.matcher("1234/a").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("/1234/a").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("/1234/*/a/").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("/1234//a").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("1234/a").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("/1234/*/a/").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("1234//a").matches());\r
-\r
-\r
- assertFalse(Validator.INST_CHARS.matcher("1234+235gd").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234-235gd").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234-23_5gd").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234-235g,d").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("m1234@shb.dd.com").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234-235gd(Version12)").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("123#4-23@5g:d").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("123#4-23@5g:d").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("").matches());\r
-\r
- \r
- for( char c=0x20;c<0x7F;++c) {\r
- boolean b;\r
- switch(c) {\r
- case '?':\r
- case '|':\r
- case '*':\r
- continue; // test separately\r
- case '~':\r
- case ',':\r
- b = false;\r
- break;\r
- default:\r
- b=true;\r
- }\r
- }\r
- \r
- assertFalse(Validator.ID_CHARS.matcher("abc").matches());\r
- assertFalse(Validator.ID_CHARS.matcher("").matches());\r
- assertTrue(Validator.ID_CHARS.matcher("abc@att.com").matches());\r
- assertTrue(Validator.ID_CHARS.matcher("ab-me@att.com").matches());\r
- assertTrue(Validator.ID_CHARS.matcher("ab-me_.x@att._-com").matches());\r
- \r
- assertFalse(Validator.NAME_CHARS.matcher("ab-me_.x@att._-com").matches());\r
- assertTrue(Validator.NAME_CHARS.matcher("ab-me").matches());\r
- assertTrue(Validator.NAME_CHARS.matcher("ab-me_.xatt._-com").matches());\r
-\r
- \r
- // 7/22/2016\r
- assertTrue(Validator.INST_CHARS.matcher(\r
- "/!com.att.*/role/write").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(\r
- ":!com.att.*:role:write").matches());\r
-\r
- }\r
-\r
-}\r
+++ /dev/null
-/*******************************************************************************\r
- * ============LICENSE_START====================================================\r
- * * org.onap.aaf\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * * \r
- * * http://www.apache.org/licenses/LICENSE-2.0\r
- * * \r
- * * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-package org.onap.aaf.authz.service.validation;\r
-\r
-import static org.junit.Assert.assertFalse;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.util.HashSet;\r
-import java.util.Set;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.onap.aaf.authz.layer.Result;\r
-import org.onap.aaf.dao.aaf.cass.PermDAO;\r
-import org.onap.aaf.dao.aaf.cass.RoleDAO;\r
-\r
-public class JU_Validator {\r
-\r
- Validator validator;\r
-\r
- @Before\r
- public void setUp() {\r
- validator = new Validator();\r
- }\r
-\r
- @Test\r
- public void test() {\r
- assertTrue(Validator.ACTION_CHARS.matcher("HowdyDoody").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("Howd?yDoody").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("_HowdyDoody").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("HowdyDoody").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("Howd?yDoody").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("_HowdyDoody").matches());\r
-\r
- //\r
- assertTrue(Validator.ACTION_CHARS.matcher("*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":*:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":*:*").matches());\r
-\r
- assertFalse(Validator.ACTION_CHARS.matcher(":hello").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":hello").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("hello:").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("hello:d").matches());\r
-\r
- assertFalse(Validator.ACTION_CHARS.matcher(":hello:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":hello:*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":hello:d*:*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":hello:d*d:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":hello:d*:*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("HowdyDoody*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("Howdy*Doody").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("HowdyDoody*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("*HowdyDoody").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("*HowdyDoody").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":h*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":h*h*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":h*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":h:h*:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":h:h*:*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":h:h*h:*").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":h:h*h*:*").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":h:*:*h").matches());\r
- assertFalse(Validator.INST_CHARS.matcher(":h:*:*h").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":com.test.*:ns:*").matches());\r
-\r
- assertFalse(Validator.ACTION_CHARS.matcher("1234+235gd").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("1234-235gd").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("1234-23_5gd").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("1234-235g,d").matches());\r
- assertTrue(Validator.ACTION_CHARS.matcher("1234-235gd(Version12)").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("123#4-23@5g:d").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("123#4-23@5g:d").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("1234-23 5gd").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("1234-235gd ").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(" 1234-235gd").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(" ").matches());\r
-\r
- // Allow % and = (Needed for Escaping & Base64 usages) jg\r
- assertTrue(Validator.ACTION_CHARS.matcher("1234%235g=d").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher(":1234%235g=d").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234%235g=d").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d:%20==").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d:==%20:=%23").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d:*:=%23").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":1234%235g=d:==%20:*").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":*:==%20:*").matches());\r
-\r
- // Allow / instead of : (more natural instance expression) jg\r
- assertFalse(Validator.INST_CHARS.matcher("1234/a").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("/1234/a").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("/1234/*/a/").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("/1234//a").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("1234/a").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("/1234/*/a/").matches());\r
- assertFalse(Validator.ACTION_CHARS.matcher("1234//a").matches());\r
-\r
- assertFalse(Validator.INST_CHARS.matcher("1234+235gd").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234-235gd").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234-23_5gd").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234-235g,d").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("m1234@shb.dd.com").matches());\r
- assertTrue(Validator.INST_CHARS.matcher("1234-235gd(Version12)").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("123#4-23@5g:d").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("123#4-23@5g:d").matches());\r
- assertFalse(Validator.INST_CHARS.matcher("").matches());\r
-\r
- for (char c = 0x20; c < 0x7F; ++c) {\r
- boolean b;\r
- switch (c) {\r
- case '?':\r
- case '|':\r
- case '*':\r
- continue; // test separately\r
- case '~':\r
- case ',':\r
- b = false;\r
- break;\r
- default:\r
- b = true;\r
- }\r
- }\r
-\r
- assertFalse(Validator.ID_CHARS.matcher("abc").matches());\r
- assertFalse(Validator.ID_CHARS.matcher("").matches());\r
- assertTrue(Validator.ID_CHARS.matcher("abc@att.com").matches());\r
- assertTrue(Validator.ID_CHARS.matcher("ab-me@att.com").matches());\r
- assertTrue(Validator.ID_CHARS.matcher("ab-me_.x@att._-com").matches());\r
-\r
- assertFalse(Validator.NAME_CHARS.matcher("ab-me_.x@att._-com").matches());\r
- assertTrue(Validator.NAME_CHARS.matcher("ab-me").matches());\r
- assertTrue(Validator.NAME_CHARS.matcher("ab-me_.xatt._-com").matches());\r
-\r
- // 7/22/2016\r
- assertTrue(Validator.INST_CHARS.matcher("/!com.att.*/role/write").matches());\r
- assertTrue(Validator.INST_CHARS.matcher(":!com.att.*:role:write").matches());\r
-\r
- }\r
-\r
- @Test\r
- public void permNotOk() {\r
-\r
- Result<PermDAO.Data> rpd = Result.err(1, "ERR_Security");\r
-\r
- validator.perm(rpd);\r
- assertTrue(validator.errs().equals("ERR_Security\n"));\r
-\r
- }\r
-\r
- @Test\r
- public void permOkNull() {\r
-\r
- Result rpd = Result.ok();\r
-\r
- validator.perm(rpd);\r
- assertTrue(validator.errs().equals("Perm Data is null.\n"));\r
-\r
- }\r
-\r
- @Test\r
- public void roleOkNull() {\r
-\r
- Result rrd = Result.ok();\r
-\r
- validator.role(rrd);\r
- assertTrue(validator.errs().equals("Role Data is null.\n"));\r
- }\r
-\r
- @Test\r
- public void roleOk() {\r
- RoleDAO.Data to = new RoleDAO.Data();\r
- to.ns = "namespace";\r
- to.name = "name";\r
- to.description = "description";\r
- Set<String> permissions = new HashSet<String>();\r
- permissions.add("perm1");\r
- to.perms = permissions;\r
-\r
- Result<RoleDAO.Data> rrd = Result.ok(to);\r
-\r
- validator.role(rrd);\r
- assertTrue(\r
- validator.errs().equals("Perm [perm1] in Role [namespace.name] is not correctly separated with '|'\n"));\r
- }\r
-\r
- @Test\r
- public void roleNotOk() {\r
-\r
- Result rrd = Result.err(1, "ERR_Security");\r
-\r
- validator.role(rrd);\r
- assertTrue(validator.errs().equals("ERR_Security\n"));\r
- }\r
-\r
-}\r
+++ /dev/null
-
-LIB=/media/sf_Users/sg481n/AAF-DOC/authz/authz-service/target/opt/app/aaf/authz-service/lib
-
-ETC=/media/sf_Users/sg481n/AAF-DOC/authz/authz-service/target/opt/app/aaf/authz-service/etc
-DME2REG=/media/sf_Users/sg481n/AAF-DOC/authz/authz-service/target/opt/dme2reg
-
-echo "this is LIB" $LIB
-echo "this is ETC" $ETC
-echo "this is DME2REG" $DME2REG
-
-CLASSPATH=$ETC
-for FILE in `find $LIB -name *.jar`; do
- CLASSPATH=$CLASSPATH:$FILE
-done
-java -classpath $CLASSPATH -DDME2_EP_REGISTRY_CLASS=DME2FS -DAFT_DME2_EP_REGISTRY_FS_DIR=$DME2REG org.onap.aaf.authz.service.AuthAPI
-
-
-
-
-
-