Initial search service commit 05/3905/2
author Daniel Silverthorn <daniel.silverthorn@amdocs.com>
Thu, 4 May 2017 17:08:13 +0000 (13:08 -0400)
committer Daniel Silverthorn <daniel.silverthorn@amdocs.com>
Tue, 9 May 2017 16:05:00 +0000 (12:05 -0400)
Changing common logging dep

Change-Id: I454697a9df0ee63f43d7b7d2a3818fe2d9b7bcf2
Signed-off-by: Daniel Silverthorn <daniel.silverthorn@amdocs.com>
138 files changed:
.gitreview [new file with mode: 0644]
License.txt [new file with mode: 0644]
README.md [new file with mode: 0644]
ajsc-shared-config/README.txt [new file with mode: 0644]
ajsc-shared-config/etc/basic-logback_root_logger_level_off.xml [new file with mode: 0644]
ajsc-shared-config/etc/logback.xml [new file with mode: 0644]
ajsc-shared-config/etc/spm2.jks [new file with mode: 0644]
antBuild/build.xml [new file with mode: 0644]
bundleconfig-local/README.txt [new file with mode: 0644]
bundleconfig-local/RELEASE_NOTES.txt [new file with mode: 0644]
bundleconfig-local/etc/appprops/PostProcessorInterceptors.properties [new file with mode: 0644]
bundleconfig-local/etc/appprops/PreProcessorInterceptors.properties [new file with mode: 0644]
bundleconfig-local/etc/appprops/app-intercepts.properties [new file with mode: 0644]
bundleconfig-local/etc/appprops/methodMapper.properties [new file with mode: 0644]
bundleconfig-local/etc/sysprops/sys-props.properties [new file with mode: 0644]
pom.xml [new file with mode: 0644]
services/README.txt [new file with mode: 0644]
src/main/ajsc/search-data-service_v1/search-data-service/v1/conf/jaxrsBeans.groovy [new file with mode: 0644]
src/main/ajsc/search-data-service_v1/search-data-service/v1/conf/searchBeans.groovy [new file with mode: 0644]
src/main/ajsc/search-data-service_v1/search-data-service/v1/docs/README.txt [new file with mode: 0644]
src/main/ajsc/search-data-service_v1/search-data-service/v1/lib/README.txt [new file with mode: 0644]
src/main/ajsc/search-data-service_v1/search-data-service/v1/props/module.props [new file with mode: 0644]
src/main/ajsc/search-data-service_v1/search-data-service/v1/routes/errorMessage.route [new file with mode: 0644]
src/main/ajsc/search-data-service_v1/search-data-service/v1/routes/jaxrsExample.route [new file with mode: 0644]
src/main/ajsc/search-data-service_v1/search-data-service/v1/routes/searchEngine.route [new file with mode: 0644]
src/main/assemble/ajsc_module_assembly.xml [new file with mode: 0644]
src/main/assemble/ajsc_props_assembly.xml [new file with mode: 0644]
src/main/assemble/ajsc_runtime_assembly.xml [new file with mode: 0644]
src/main/bin/start.sh [new file with mode: 0644]
src/main/config/ajsc-chef.jks [new file with mode: 0644]
src/main/config/ajsc-jetty.xml [new file with mode: 0644]
src/main/config/ajsc-override-web.xml [new file with mode: 0644]
src/main/config/ajscJetty.jks [new file with mode: 0644]
src/main/config/jul-redirect.properties [new file with mode: 0644]
src/main/config/keyfile [new file with mode: 0644]
src/main/config/runner-web.xml [new file with mode: 0644]
src/main/docker/Dockerfile [new file with mode: 0644]
src/main/java/org/openecomp/sa/auth/SearchDbServiceAuth.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/auth/SearchDbServiceAuthCore.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/AnalyzerApi.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/ApiUtils.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/BulkApi.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/BulkMetaData.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/BulkOperation.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/BulkRequest.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/Document.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/DocumentApi.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/IndexApi.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/rest/SearchServiceApi.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsEchoService.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsUserService.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/config/ElasticSearchConfig.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntity.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntityImpl.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreInterface.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchBulkOperationResult.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchCause.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchError.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpController.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchOperationStatus.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchResultItem.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchShardStatus.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/exception/DocumentStoreOperationException.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationBucket.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResult.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResults.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/Document.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/DocumentOperationResult.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/ErrorResult.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/OperationResult.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHit.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHits.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchOperationResult.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/logging/SearchDbMsgs.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AbstractAggregation.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Aggregation.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatement.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregation.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRange.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregation.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Filter.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregation.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/ParsedQuery.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Query.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryStatement.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/RangeQuery.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatement.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Sort.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/TermQuery.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/service/SearchService.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/util/AggregationParsingUtil.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/util/DocumentSchemaUtil.java [new file with mode: 0644]
src/main/java/org/openecomp/sa/searchdbabstraction/util/SearchDbConstants.java [new file with mode: 0644]
src/main/resources/json/schema/analyzer.schema.json [new file with mode: 0644]
src/main/resources/json/schema/document-field.schema.json [new file with mode: 0644]
src/main/resources/json/schema/document.schema.json [new file with mode: 0644]
src/main/resources/json/schema/filter.schema.json [new file with mode: 0644]
src/main/resources/logging/SearchDbMsgs.properties [new file with mode: 0644]
src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context [new file with mode: 0644]
src/main/runtime/context/default#0.context [new file with mode: 0644]
src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json [new file with mode: 0644]
src/main/runtime/shiroRole/ajscadmin.json [new file with mode: 0644]
src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json [new file with mode: 0644]
src/main/runtime/shiroRole/contextadmin#default.json [new file with mode: 0644]
src/main/runtime/shiroUser/ajsc.json [new file with mode: 0644]
src/main/runtime/shiroUserRole/ajsc#ajscadmin.json [new file with mode: 0644]
src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json [new file with mode: 0644]
src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json [new file with mode: 0644]
src/test/java/org/openecomp/sa/rest/ApiUtilsTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/rest/BulkApiTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/rest/DocumentApiTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/rest/DocumentSchemaTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/rest/IndexApiTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/rest/SearchServiceApiHarness.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/rest/StubEsController.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/rest/TestUtils.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/AggregationResponseParsingTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpControllerTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatementTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregationTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregationTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/FilterTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregationTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatementTest.java [new file with mode: 0644]
src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SortTest.java [new file with mode: 0644]
src/test/resources/json/analysis-config.json [new file with mode: 0644]
src/test/resources/json/bulk-ops-invalid.json [new file with mode: 0644]
src/test/resources/json/bulk-ops-valid.json [new file with mode: 0644]
src/test/resources/json/filter-config.json [new file with mode: 0644]
src/test/resources/json/nested-document.json [new file with mode: 0644]
src/test/resources/json/queries/query-with-subrange.json [new file with mode: 0644]
src/test/resources/json/queries/simple-parsed-query.json [new file with mode: 0644]
src/test/resources/json/simpleDocument.json [new file with mode: 0644]
src/test/resources/json/tier-support-document.json [new file with mode: 0644]

diff --git a/.gitreview b/.gitreview
new file mode 100644 (file)
index 0000000..767d5bf
--- /dev/null
@@ -0,0 +1,4 @@
+[gerrit]
+host=gerrit.onap.org
+port=29418
+project=aai/search-data-service
diff --git a/License.txt b/License.txt
new file mode 100644 (file)
index 0000000..e3a22bf
--- /dev/null
@@ -0,0 +1,22 @@
+============LICENSE_START=======================================================
+Search Data Service
+================================================================================
+Copyright © 2017 AT&T Intellectual Property.
+Copyright © 2017 Amdocs
+All rights reserved.
+================================================================================
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+============LICENSE_END=========================================================
+
+ECOMP and OpenECOMP are trademarks
+and service marks of AT&T Intellectual Property.
diff --git a/README.md b/README.md
new file mode 100644 (file)
index 0000000..97831cc
--- /dev/null
+++ b/README.md
@@ -0,0 +1,273 @@
+# Search Engine Micro Service
+
+The _Search Engine_ micro service exposes REST APIs which allow clients to interact with the search database back end without requiring direct knowledge of, or interaction with, the underlying technology.
+## High Level Concepts
+This section establishes some of the terminology and concepts that relate to interacting with the _Search Engine_ service.
+A much more detailed examination of these concepts can be found on the [Search Engine Design Share](http://d2athenaconf:8090/confluence/display/AAI/AAI-4633%3A+Search+DB+Abstraction%3A+Expose+REST+Interface) Confluence page.
+
+### Documents
+_Documents_ are the _things_ that we want to put into our document store.  At its most basic, a _document_ is a collection of _fields_ which contain the data that we want to be able to store and query.
+
+_Fields_ are defined as having a name, a type, and optional parameters indicating whether or not the field is intended to be searchable, and if so, how it should be indexed.
+
+### Indexes
+An _index_ is essentially a collection of _documents_.  It is the top-level container into which we will store our documents.  A single data store may have multiple _indexes_ for the purposes of segregating different types of data (most queries are performed across all documents within a *single* index).
+
+---
+## Getting Started
+
+### Building The Micro Service
+
+After checking out the project, execute the following Maven command from the project's top level directory:
+
+    > mvn clean install
+    
+### Running The Micro Service Locally
+To run the microservice in your local environment, execute the following Maven command from the project's top level directory:
+
+    > mvn -P runAjsc
+
+### Running The Micro Service Within An Eclipse Environment
+It is often extremely useful to be able to run a micro service from within Eclipse in order to set breakpoints and perform general debugging activities.
+
+For a good reference on how to launch any of the D2 micro services from within an Eclipse environment, refer to the following Confluence page: [Running An AJSC Container Within Eclipse](http://d2athenaconf:8090/confluence/pages/viewpage.action?pageId=1840887#DevelopingMicroserviceswithAT&T-RunninganAJSCContainerwithinEclipse)
+
+---
+
+## Public Interfaces
+
+### Echo Service
+The _Search Database Abstraction_ micro service supports the standard echo service to allow it to be 'pinged' to verify that the service is up and responding.
+
+The echo service is reachable via the following REST end point:
+
+    http://{host}:9509/services/search-data-service/v1/jaxrsExample/jaxrs-services/echo/{input}
+
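+For example, assuming a local deployment on the default port, the echo endpoint can be exercised with a simple curl call (the trailing `hello` is an arbitrary input string that the service echoes back):
+
+    > curl http://localhost:9509/services/search-data-service/v1/jaxrsExample/jaxrs-services/echo/hello
+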
+### Indexes
+The _Search Engine_ service supports simple creation and deletion of document indexes via the following REST API calls:
+
+##### Create Index
+    Method         : POST
+    URL            : https://<host>:9509/services/search-data-service/v1/search/indexes/<index>/
+    URL Params     : index - The name of the index to be created.
+    Request Payload:
+        A document structure expressed as JSON.
+        
+    Response Payload:
+        {"url": "< resource location of the index >"
+
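+As an illustration, the following hypothetical curl call creates an index named `my-index`.  The field names and types in the payload are examples only; see the document schema for the authoritative structure:
+
+    > curl -k -X POST https://localhost:9509/services/search-data-service/v1/search/indexes/my-index/ \
+           -H "Content-Type: application/json" \
+           -d '{"fields": [{"name": "name", "data-type": "string"}, {"name": "timestamp", "data-type": "date"}]}'
+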
+##### Delete Index
+    Method         : DELETE
+    URL            : http://<host>:9509/services/search-data-service/v1/search/indexes/<index>/
+    URL Params     : index - The name of the index to be deleted.
+    Request Payload:
+        None    
+        
+   
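+A corresponding deletion, assuming the same hypothetical local deployment and index name, might look like:
+
+    > curl -X DELETE http://localhost:9509/services/search-data-service/v1/search/indexes/my-index/
+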
+### Documents
+##### Create Document Without Specifying a Document Identifier
+Documents can be created via a POST request with no document identifier specified.  In this case the document store will generate an identifier to associate with the document.
+
+    Method         : POST
+    URL            : https://<host>:9509/services/search-data-service/v1/search/indexes/<index>/documents/
+    URL Params     : index       - The name of the index to create the document in.
+    Request Payload:
+        Document contents expressed as a JSON object containing key/value pairs.
+        
+    Response Payload:
+        { "etag": "string", "url": "string" }
+        
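+A minimal sketch of such a request, assuming an existing index named `my-index` on a local deployment:
+
+    > curl -k -X POST https://localhost:9509/services/search-data-service/v1/search/indexes/my-index/documents/ \
+           -H "Content-Type: application/json" \
+           -d '{"name": "Bob", "timestamp": "2017-01-01T00:00:00.000+00:00"}'
+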
+##### Create or Update Document With a Specified Document Identifier
+Documents can also be created via a PUT request which includes an identifier to associate with the document.  The PUT endpoint is used for both creates and updates, distinguished as follows:
+* If the request header DOES NOT include a value in the If-Match field, then the request is assumed to be a document create.
+* If the request header DOES contain a value in the If-Match field, then the request is assumed to be a document update.
+
+    Method         : PUT
+    URL            : https://<host>:9509/services/search-data-service/v1/search/indexes/<index>/documents/<document id>
+    URL Params     : index       - The name of the index to create or update the document in.
+                     document id - The identifier of the document to be created or updated.
+    Request Payload:
+        Document contents expressed as a JSON object containing key/value pairs.
+        
+    Response Payload:
+        { "etag": "string", "url": "string"}
+        
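+For example, a hypothetical update of document `1` in `my-index`, passing a previously returned etag in the If-Match header (omit the header to create the document instead):
+
+    > curl -k -X PUT https://localhost:9509/services/search-data-service/v1/search/indexes/my-index/documents/1 \
+           -H "Content-Type: application/json" \
+           -H "If-Match: 1" \
+           -d '{"name": "Robert", "timestamp": "2017-01-02T00:00:00.000+00:00"}'
+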
+##### Delete a Document
+
+    Method         : DELETE
+    URL            : https://<host>:9509/services/search-data-service/v1/search/indexes/<index>/documents/<document id>
+    URL Params     : index       - The name of the index to remove the document from.
+                     document id - the identifier of the document to be deleted.
+    Request Payload:
+        None.
+        
+##### Retrieve a Document
+
+    Method         : GET
+    URL            : https://<host>:9509/services/search-data-service/v1/search/indexes/<index>/documents/<document id>
+    URL Params     : index       - The name of the index to retrieve the document from.
+                     document id - the identifier of the document to be retrieved.
+    Request Payload:
+        None.
+        
+
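+As a sketch, retrieving document `1` from the hypothetical `my-index` on a local deployment:
+
+    > curl -k https://localhost:9509/services/search-data-service/v1/search/indexes/my-index/documents/1
+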
+### Searching the Document Store
+Search statements are passed to the _Search Data Service_ as a JSON object which is structured as follows:
+
+_Filters_
+* A "filter" stanza defines a set of queries to be run in _non-scoring-mode_ to reduce the document set to a smaller subset to be searched.
+* The filter stanza is optional - omitting it implies that the query is _unfiltered_.
+* This stanza is represented as a JSON object with the following structure:
+
+    "filter": {
+                "all": [ { query }, { query },....{ query }],
+                "any": [ { query }, { query },....{ query }]
+    },
+
+Where: 
+* the _all_ list defines a set of queries such that ALL queries in the list must be satisfied for the document to pass the filter.
+* the _any_ list defines a set of queries such that ANY single query in the list must be satisfied for the document to pass the filter.
+
+_Queries_
+The following types of query statements are supported by the _Search Data Service_:
+
+_Term Query_:
+
+A term query attempts to match the literal value of a field, with no advanced parsing or analysis of the query string.  This type of query is most appropriate for structured data like numbers, dates and enums, rather than full text fields.
+
+    // Find documents where the specified field contains the supplied value
+    "match": {
+        "field": "value"
+    }
+  
+    // Find documents where the specified field DOES NOT contain the supplied value
+    "not-match": {
+        "field": "value"
+    }
+    
+_Parsed Query_:
+
+Parsed queries apply a query parser to the supplied query string in order to determine the exact query to apply to the specified field.
+The query string is parsed into a series of terms and operators, as described below:
+
+Terms may be any of the following:
+* single words
+* exact phrases, as denoted by enclosing the phrase in open and close quotations.  Example: "this is my exact phrase"
+* regular expressions, as denoted by wrapping the expression in forward slash ( / ) characters.  Example: /joh?n(ath[oa]n)/
+
+The supported operators are as follows:
+* AND - Both terms to the left or right of the operator MUST be present
+* OR  - Either the term to the left or right of the operator MUST be present
+* NOT - The term to the right of the operator MUST NOT be present.
+
+    "parsed-query": {
+        "field": "fieldname",
+        "query-string": "string"
+    }
+    
+_Range Query_:
+
+Range queries match fields whose term value falls within the specified numeric or date range.
+Supported bounds operators include:
+* gt  - Greater than
+* gte - Greater than or equal to
+* lt  - Less than
+* lte - Less than or equal to
+
+    "range": {
+        "field": "fieldname",
+        "operator": "value",
+        "operator": "value"
+    }
+        
+##### Examples
+The following snippet illustrates a search statement describing a filtered query which uses examples of all of the supported query types:
+
+    {
+        "filter": {
+            "all": [{"range": {"field": "timestamp", "lte": "2016-12-01T00:00:00.558+03:00"}}],
+            "any": [ ]
+        },
+        
+        "queries": [
+            {"match": {"field": "name", "value": "Bob"}},
+            {"parsed-query": {"field": "street-name", "query-string": "Main OR First"}},
+            {"range": {"field": "street-number", "gt": 10, "lt": 50}}
+        ]
+    }
+
+##### REST Endpoint
+
+    Method         : POST
+    URL            : https://<host>:9509/services/search-data-service/v1/search/indexes/<index>/query
+    URL Params     : index       - The name of the index to apply the query to.
+
+    Request Payload:
+        {
+            "filter": {
+                "all": [ { query }, { query },....{ query }],
+                "any": [ { query }, { query },....{ query }]
+            },
+            
+            "queries": [
+                { query },
+                    .
+                    .
+                { query }
+            ]
+        }
+
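+As an end-to-end sketch, the earlier example search statement could be submitted to a local deployment as follows (the index name is hypothetical):
+
+    > curl -k -X POST https://localhost:9509/services/search-data-service/v1/search/indexes/my-index/query \
+           -H "Content-Type: application/json" \
+           -d '{"filter": {"all": [{"range": {"field": "timestamp", "lte": "2016-12-01T00:00:00.558+03:00"}}]}, "queries": [{"parsed-query": {"field": "street-name", "query-string": "Main OR First"}}]}'
+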
+### Bulk Operations
+Bulk operations allow the client to bundle a number of actions into a single REST request.
+It is important to note that individual operations bundled into a bulk request are considered by the _Search Service_ to be completely independent operations.  This has a few important consequences:
+* No guarantees are made with respect to the order in which the individual operations will be processed by the document store.
+* There is no implied transactionality between the operations.  Individual operations may succeed or fail independently of one another, and it is entirely possible for the client to receive back a result set indicating a mix of success and failure results for the individual operations.
+
+##### Submit Bulk Request
+    Method        : POST
+    URL           : http://<host>:9509/services/search-data-service/v1/search/bulk/
+    URL Params    : NONE
+    Request Payload:
+        A JSON structure containing all of the bundled actions to be performed.
+        It must correspond to the following format:
+            [
+                { "operation": { <metaData>, <document> } },
+                { "operation": { <metaData>, <document> } },
+                            .
+                            .
+                { "operation": { <metaData>, <document> } }
+            ]
+            
+        Where,
+            operation - Is one of:  "create", "update", or "delete"
+            
+            metaData  - A structure containing meta-data associated with the individual operation to be performed.  Valid fields include:
+                "url"   - The resource identifier of the document to be operated on.
+                "etag" - Identifies the version of the document to be acted on.  Required for "update" and "delete" operations.
+                
+            document - The document contents for "create" and "update" operations.
+            
+        Example Payload:
+        [
+            {"create": {"metaData": {"url": "/services/search-data-service/v1/search/indexes/the-index/documents/1"}, "document": {"f1": "v1", "f2": "v2"}}},
+            {"create": {"metaData": {"url": "/services/search-data-service/v1/search/indexes/the-index/documents/2"}, "document": {"f1": "v1", "f2": "v2"}}},
+            {"update": {"metaData": {"url": "/services/search-data-service/v1/search/indexes/the-index/documents/8", "etag": "1"}, "document": {"f1": "v1a", "f2": "v2a"}}},
+            {"delete": {"metaData": {"url": "/services/search-data-service/v1/search/indexes/the-index/documents/99", "etag": "3"}}}
+        ]
+        
+    Response Payload:
+        The response body will contain an aggregation of the collective results as well as separate status codes for each of the operations in the request.
+        Example:
+        {
+            "total_operations": 4,
+            "total_success": 4,
+            "total_fails": 0,
+            "results": [
+                {"operation": "create", "url": "/services/search-data-service/v1/search/indexes/the-index/documents/1", "etag": "1", "status-code": "201", "status-message": "OK"},
+                {"operation": "create", "url": "/services/search-data-service/v1/search/indexes/the-index/documents/2", "etag": "1", "status-code": "201", "status-message": "OK"},
+                {"operation": "update", "url": "/services/search-data-service/v1/search/indexes/the-index/documents/8", "etag": "2", "status-code": "200", "status-message": "OK"},
+                {"operation": "delete", "url": "/services/search-data-service/v1/search/indexes/the-index/documents/99", "status-code": "200", "status-message": "OK"}
+            ]
+        }
\ No newline at end of file
diff --git a/ajsc-shared-config/README.txt b/ajsc-shared-config/README.txt
new file mode 100644 (file)
index 0000000..a82eb64
--- /dev/null
@@ -0,0 +1,6 @@
+The ajsc-shared-config folder is included in the service project to provide the functionality of the AJSC_SHARED_CONFIG 
+location that will exist in CSI envs. This includes the logback.xml for logging configurations, and some csm related 
+artifacts necessary for proper functionality of the csm framework within the CSI env. Within the 2 profiles that can 
+be utilized to run the AJSC locally, "runLocal" and "runAjsc", the system property, "AJSC_SHARED_CONFIG", has been set
+to point to this directory. The files in this folder will NOT be copied/moved anywhere within the AJSC SWM package. These 
+files will already be in existence within the CSI env.
\ No newline at end of file
diff --git a/ajsc-shared-config/etc/basic-logback_root_logger_level_off.xml b/ajsc-shared-config/etc/basic-logback_root_logger_level_off.xml
new file mode 100644 (file)
index 0000000..4ebe2db
--- /dev/null
@@ -0,0 +1,87 @@
+<!-- 
+ Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+-->
+<configuration scan="true" scanPeriod="3 seconds" debug="true">
+       <property name="logDirectory" value="${AJSC_HOME}/log" />
+       <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>ERROR</level>
+               </filter>
+               <encoder>
+                       <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - %msg%n
+                       </pattern>
+               </encoder>
+       </appender>
+
+       <appender name="INFO"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>DEBUG</level>
+               </filter>
+               <file>${logDirectory}/info_ajsc.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+                       <fileNamePattern>${logDirectory}/info_ajsc.%i.log.zip
+                       </fileNamePattern>
+                       <minIndex>1</minIndex>
+                       <maxIndex>9</maxIndex>
+               </rollingPolicy>
+               <triggeringPolicy
+                       class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+                       <maxFileSize>5MB</maxFileSize>
+               </triggeringPolicy>
+               <encoder>
+                       <pattern>"%d [%thread] %-5level %logger{1024} - %msg%n"</pattern>
+               </encoder>
+       </appender>
+       <appender name="ERROR"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>ERROR</level>
+               </filter>
+               <file>${logDirectory}/error_ajsc.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+                       <fileNamePattern>${logDirectory}/error_ajsc.%i.log.zip
+                       </fileNamePattern>
+                       <minIndex>1</minIndex>
+                       <maxIndex>9</maxIndex>
+               </rollingPolicy>
+               <triggeringPolicy
+                       class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+                       <maxFileSize>5MB</maxFileSize>
+               </triggeringPolicy>
+               <encoder>
+                       <!-- <pattern>"%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - %msg%n"</pattern> -->
+                       <pattern>"%d [%thread] %-5level %logger{1024} - %msg%n"</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="AJSC-AUDIT" class="ch.qos.logback.classic.net.SyslogAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>INFO</level>
+               </filter>
+               <syslogHost>localhost</syslogHost>
+               <facility>USER</facility>
+               <!-- Note the colon character below - it is important part of "TAG" message 
+                       format You need a colon to determine where the TAG field ends and the CONTENT 
+                       begins -->
+               <suffixPattern>AJSC_AUDIT: [%thread] [%logger] %msg</suffixPattern>
+       </appender>
+       <appender name="CONTROLLER-AUDIT" class="ch.qos.logback.classic.net.SyslogAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>INFO</level>
+               </filter>
+               <syslogHost>localhost</syslogHost>
+               <facility>USER</facility>
+               <!-- Note the colon character below - it is important part of "TAG" message 
+                       format You need a colon to determine where the TAG field ends and the CONTENT 
+                       begins -->
+               <suffixPattern>AJSC_AUDIT: [%thread] [%logger] mdc:[%mdc] %msg
+               </suffixPattern>
+       </appender>
+
+       <root level="off">
+               <appender-ref ref="ERROR" />
+               <appender-ref ref="INFO" />
+               <appender-ref ref="STDOUT" />
+       </root>
+</configuration>
diff --git a/ajsc-shared-config/etc/logback.xml b/ajsc-shared-config/etc/logback.xml
new file mode 100644 (file)
index 0000000..8e0585e
--- /dev/null
@@ -0,0 +1,212 @@
+<configuration scan="true" scanPeriod="3 seconds" debug="false">
+  <!--<jmxConfigurator /> -->
+  <!-- directory path for all other type logs -->
+  
+  <property name="logDir"  value="${AJSC_HOME}/logs" />
+  
+  
+  <!--  specify the component name 
+       <ECOMP-component-name>::= "MSO" | "DCAE" | "ASDC " | "AAI" |"Policy" | "SDNC" | "AC"  -->
+  <property name="componentName" value="AAI-SDB"></property>
+  
+  <!--  default eelf log file names -->
+  <property name="generalLogName" value="error" />
+  <property name="metricsLogName" value="metrics" />
+  <property name="auditLogName" value="audit" />
+  <property name="debugLogName" value="debug" />
+  
+  <property name="errorLogPattern" value="%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%mdc{RequestId}|%thread|SearchDataService|%mdc{PartnerName}|%logger||%.-5level|%msg%n" />
+  <property name="auditMetricPattern" value="%m%n" />
+
+  <property name="logDirectory" value="${logDir}/${componentName}" />
+  
+  <!-- Example evaluator filter applied against console appender -->
+  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+    <encoder>
+      <pattern>${errorLogPattern}</pattern>
+    </encoder>
+  </appender>
+
+  <!-- ============================================================================ -->
+  <!-- EELF Appenders -->
+  <!-- ============================================================================ -->
+  
+  <!-- The EELFAppender is used to record events to the general application 
+       log -->
+  
+  <appender name="EELF"
+            class="ch.qos.logback.core.rolling.RollingFileAppender">
+    <file>${logDirectory}/${generalLogName}.log</file>
+    <rollingPolicy
+        class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+      <fileNamePattern>${logDirectory}/${generalLogName}.%d{yyyy-MM-dd}.log.zip
+      </fileNamePattern>
+      <maxHistory>60</maxHistory>
+    </rollingPolicy>
+    <encoder>
+      <pattern>${errorLogPattern}</pattern>
+    </encoder>
+  </appender>
+  <appender name="asyncEELF" class="ch.qos.logback.classic.AsyncAppender">
+    <!-- deny all events with a level below INFO, that is TRACE and DEBUG -->
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>INFO</level>
+    </filter>
+    <queueSize>256</queueSize>
+    <appender-ref ref="EELF" />
+  </appender>
+
+  
+  <!-- EELF Audit Appender. This appender is used to record audit engine 
+       related logging events. The audit logger and appender are specializations 
+       of the EELF application root logger and appender. This can be used to segregate 
+       Policy engine events from other components, or it can be eliminated to record 
+       these events as part of the application root log. -->
+  
+  <appender name="EELFAudit"
+            class="ch.qos.logback.core.rolling.RollingFileAppender">
+    <file>${logDirectory}/${auditLogName}.log</file>
+    <rollingPolicy
+        class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+      <fileNamePattern>${logDirectory}/${auditLogName}.%d{yyyy-MM-dd}.log.zip
+      </fileNamePattern>
+      <maxHistory>60</maxHistory>
+    </rollingPolicy>
+    <encoder>
+      <pattern>${auditMetricPattern}</pattern>
+    </encoder>
+  </appender>
+  <appender name="asyncEELFAudit" class="ch.qos.logback.classic.AsyncAppender">
+    <queueSize>256</queueSize>
+    <appender-ref ref="EELFAudit" />
+  </appender>
+  
+  <appender name="EELFMetrics"
+            class="ch.qos.logback.core.rolling.RollingFileAppender">
+    <file>${logDirectory}/${metricsLogName}.log</file>
+    <rollingPolicy
+        class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+      <fileNamePattern>${logDirectory}/${metricsLogName}.%d{yyyy-MM-dd}.log.zip
+      </fileNamePattern>
+      <maxHistory>60</maxHistory>
+    </rollingPolicy>
+    <encoder>
+      <!-- <pattern>"%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - 
+           %msg%n"</pattern> -->
+      <pattern>${auditMetricPattern}</pattern>
+    </encoder>
+  </appender>
+  
+  
+  <appender name="asyncEELFMetrics" class="ch.qos.logback.classic.AsyncAppender">
+    <queueSize>256</queueSize>
+    <appender-ref ref="EELFMetrics"/>
+  </appender>
+  
+  <appender name="EELFDebug"
+            class="ch.qos.logback.core.rolling.RollingFileAppender">
+    <file>${logDirectory}/${debugLogName}.log</file>
+    <rollingPolicy
+        class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+      <fileNamePattern>${logDirectory}/${debugLogName}.%d{yyyy-MM-dd}.log.zip
+      </fileNamePattern>
+      <maxHistory>60</maxHistory>
+    </rollingPolicy>
+    <encoder>
+      <pattern>${errorLogPattern}</pattern>
+    </encoder>
+  </appender>
+  
+  <appender name="asyncEELFDebug" class="ch.qos.logback.classic.AsyncAppender">
+    <queueSize>256</queueSize>
+    <appender-ref ref="EELFDebug" />
+    <includeCallerData>false</includeCallerData>
+  </appender>
+  
+  
+  <!-- ============================================================================ -->
+  <!--  EELF loggers -->
+  <!-- ============================================================================ -->
+  <logger name="com.att.eelf" level="info" additivity="false">
+    <appender-ref ref="asyncEELF" />
+    <appender-ref ref="asyncEELFDebug" />
+  </logger>
+
+  <logger name="com.att.eelf.security" level="info" additivity="false">
+    <appender-ref ref="asyncEELFSecurity" /> 
+  </logger>
+  <logger name="com.att.eelf.perf" level="info" additivity="false">
+    <appender-ref ref="asyncEELFPerformance" />
+  </logger>
+  <logger name="com.att.eelf.server" level="info" additivity="false">
+    <appender-ref ref="asyncEELFServer" />
+  </logger>
+  <logger name="com.att.eelf.policy" level="info" additivity="false">
+    <appender-ref ref="asyncEELFPolicy" />
+  </logger>
+  <logger name="com.att.eelf.audit" level="info" additivity="false">
+    <appender-ref ref="asyncEELFAudit" />
+  </logger>
+  <logger name="com.att.eelf.metrics" level="info" additivity="false">
+    <appender-ref ref="asyncEELFMetrics" />
+  </logger>
+  
+  <!-- Spring related loggers -->
+  <logger name="org.springframework" level="WARN" />
+  <logger name="org.springframework.beans" level="WARN" />
+  <logger name="org.springframework.web" level="WARN" />
+  <logger name="com.blog.spring.jms" level="WARN" />
+
+  <!-- AJSC Services (bootstrap services) -->
+  <logger name="ajsc" level="WARN" />
+  <logger name="ajsc.RouteMgmtService" level="WARN" />
+  <logger name="ajsc.ComputeService" level="WARN" />
+  <logger name="ajsc.VandelayService" level="WARN" />
+  <logger name="ajsc.FilePersistenceService" level="WARN" />
+  <logger name="ajsc.UserDefinedJarService" level="WARN" />
+  <logger name="ajsc.UserDefinedBeansDefService" level="WARN" />
+  <logger name="ajsc.LoggingConfigurationService" level="WARN" />
+  <logger name="ajsc.ErrorMessageLookupService" level="WARN" />
+  
+  <!-- AJSC related loggers (DME2 Registration, csi logging, restlet, servlet 
+       logging) -->
+  <logger name="ajsc.utils" level="WARN" />
+  <logger name="ajsc.utils.DME2Helper" level="WARN" />
+  <logger name="ajsc.filters" level="WARN" />
+  <logger name="ajsc.beans.interceptors" level="WARN" />
+  <logger name="ajsc.restlet" level="WARN" />
+  <logger name="ajsc.servlet" level="WARN" />
+  <logger name="com.att" level="INFO" />
+  <logger name="com.att.ajsc.csi.logging" level="WARN" />
+  <logger name="com.att.ajsc.filemonitor" level="WARN" />
+
+  <!-- SearchDB loggers -->
+  <logger name="org.openecomp.sa" level="INFO" />
+
+  <!-- Other Loggers that may help troubleshoot -->
+  <logger name="net.sf" level="WARN" />
+  <logger name="org.apache.commons.httpclient" level="WARN" />
+  <logger name="org.apache.commons" level="WARN" />
+  <logger name="org.apache.coyote" level="WARN" />
+  <logger name="org.apache.jasper" level="WARN" />
+
+  <!-- Camel Related Loggers (including restlet/servlet/jaxrs/cxf logging. 
+       May aid in troubleshooting) -->
+  <logger name="org.apache.camel" level="WARN" />
+  <logger name="org.apache.cxf" level="WARN" />
+  <logger name="org.apache.camel.processor.interceptor" level="WARN" />
+  <logger name="org.apache.cxf.jaxrs.interceptor" level="WARN" />
+  <logger name="org.apache.cxf.service" level="WARN" />
+  <logger name="org.restlet" level="WARN" />
+  <logger name="org.apache.camel.component.restlet" level="WARN" />
+
+  <!-- logback internals logging -->
+  <logger name="ch.qos.logback.classic" level="WARN" />
+  <logger name="ch.qos.logback.core" level="WARN" />
+
+  <root>
+    <appender-ref ref="asyncEELF" /> 
+    <!-- <appender-ref ref="asyncEELFDebug" /> -->
+  </root>
+  
+</configuration>
diff --git a/ajsc-shared-config/etc/spm2.jks b/ajsc-shared-config/etc/spm2.jks
new file mode 100644 (file)
index 0000000..8ff2a00
Binary files /dev/null and b/ajsc-shared-config/etc/spm2.jks differ
diff --git a/antBuild/build.xml b/antBuild/build.xml
new file mode 100644 (file)
index 0000000..49386fa
--- /dev/null
@@ -0,0 +1,230 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+ Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+-->
+<project>
+       <target name="runLocal">
+               <java dir="${basedir}" fork="yes" newenvironment="true"
+                       failonerror="true" classname="com.att.ajsc.runner.Runner">
+                       <classpath
+                               path="${classpath}:${basedir}/ajsc-shared-config/etc:${runAjscHome}/lib/ajsc-runner-${ajscRuntimeVersion}.jar" />
+
+                       <!-- Windows Users may need to add a jvmarg arg to create a temp directory 
+                               properly. -->
+                       <!-- <jvmarg value="-Djava.io.tmpdir=C:/yourTempDirectory"/> -->
+
+                       <!-- Uncomment the following 2 jvmarg values to enable Remote Debugging. 
+                        -->
+                       <!-- <jvmarg value="-Xdebug" /> -->
+                       <!-- <jvmarg value="-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5432" 
+                               /> -->
+
+                       <jvmarg value="-XX:MaxPermSize=512m" />
+                       <jvmarg value="-Xmx1024m" />
+
+                       <!-- Main ajsc Variables below (Variables necessary for proper startup 
+                               of AJSC) -->
+                       <env key="AJSC_HOME" value="${runAjscHome}" />
+                       <sysproperty key="AJSC_HOME" value="${runAjscHome}" />
+                       <!-- you may specify any external location for AJSC_CONF_HOME where etc 
+                               folder & all other configs can be found under it. If not specified, it will 
+                               default to AJSC_HOME -->
+                       <sysproperty key="AJSC_CONF_HOME" value="${basedir}/bundleconfig-local" />
+                       <sysproperty key="AJSC_SHARED_CONFIG" value="${basedir}/ajsc-shared-config" />
+
+                       <!-- Location of logback.xml file used for logging configurations. Please, 
+                               note, when deploying a service to either CSI or NON-CSI environment, this 
+                               system property will be set in sys-props.properties file. We are setting 
+                               it here for running locally due to the ease of use of maven variable for 
+                               basedir. -->
+                       <sysproperty key="logback.configurationFile"
+                               value="${basedir}/ajsc-shared-config/etc/logback.xml" />
+
+                       <!-- Setting system properties for the AJSC external libs and properties 
+                               folders below. When deploying to a node, these properties will be set within 
+                               the bundleconfig/etc/sysprops/sys-props.properties file. However, when running 
+                               locally, the ${basedir} substitution works more efficiently in this manner. -->
+                       <sysproperty key="AJSC_EXTERNAL_LIB_FOLDERS" value="${basedir}/target/commonLibs" />
+                       <sysproperty key="AJSC_EXTERNAL_PROPERTIES_FOLDERS"
+                               value="${basedir}/ajsc-shared-config/etc" />
+
+                       <!-- End of Main ajsc Variables below (Variables necessary for proper 
+                               startup of AJSC) -->
+
+                       <!-- Uncomment the following line to add oauthentication to your Service -->
+                       <!-- <sysproperty key="spring.profiles.active" value="oauth" /> -->
+
+                       <!-- If using Cassandra as Database, Enter the ip/host and port below 
+                               based on your known configuration -->
+                       <!-- <sysproperty key="cassandra.ip" value="hostname" /> -->
+                       <!-- <sysproperty key="cassandra.port" value="9042" /> -->
+
+                       <!-- The APP_SERVLET_URL_PATTERN variable is defaulted to "/services" 
+                               within the initial configuration of the AJSC. If you are changing the CamelServlet 
+                               Filter within the ajsc-override-web.xml, you should use that url-pattern 
+                               here. This is necessary to properly register your service with dme2. An empty 
+                               value, "", is used when NO value is wanted (url-pattern would be /* for CamelServlet 
+                               Filter) -->
+                       <!-- As of 4.5.1, this property is no longer needed -->
+                       <!-- <sysproperty key="APP_SERVLET_URL_PATTERN" value="/services" /> -->
+
+                       <!-- GRM/DME2 System Properties below -->
+                       <sysproperty key="AJSC_SERVICE_NAMESPACE" value="${module.ajsc.namespace.name}" />
+                       <sysproperty key="AJSC_SERVICE_VERSION" value="${module.ajsc.namespace.version}" />
+                       <sysproperty key="SOACLOUD_SERVICE_VERSION" value="${project.version}" />
+                       <!-- End of GRM/DME2 System Property Variables -->
+
+                       <!-- The following server.port variable was necessary for the proper registration 
+                               of the AJSC to dme2. This value may still need to be used if the Developer 
+                               is hardcoding their port (example: 8080). Then, the server.port value="8080". 
+                               The default functionality for the AJSC is to use EPHEMERAL ports. In this 
+                               case, you do NOT need to set the server.port value. The AJSC will find the 
+                               proper port value and register to dme2 correctly -->
+                       <!-- <sysproperty key="server.port" value="${serverPort}" /> -->
+
+                       <!-- Command Line Arguments to add to the java command. Here, you can 
+                               specify the port as well as the Context you want your service to run in. 
+                               Use context=/ to run in an unnamed Context (Root Context). The default configuration 
+                               of the AJSC is to run under the /ajsc Context. Setting the port here can 
+                               aid during the development phase of your service. However, you can leave 
+                               this argument out entirely, and the AJSC will default to using an Ephemeral 
+                               port. -->
+                       <arg line="context=/ port=${serverPort} sslport=${sslport}" />
+               </java>
+       </target>
+       <target name="prep_home_directory_for_swm_pkgcreate">
+
+<!-- ********* GENERATE CADI KEY AND ENCRYPTED PASSWORD ***********
+     
+            Uncomment the following if your cadi key gets corrupted. It would
+                       generate the Cadi key and password in the package phase and keep the key
+                       in 'src/main/config/ajscKey' and the password at the bottom of cadi.properties (you
+                       need to modify the 'aaf_pass' variable with this value). Please modify the
+                       template.cadi.properties as well before uploading to the SOA node
+-->
+
+<!-- 
+               <java jar="${basedir}/target/userjars/cadi-core-1.2.5.jar" fork="true"> 
+               <arg value="keygen" /> <arg value="src/main/config/ajscKey" /> 
+               </java> 
+               
+               <echo>***Cadi Key file generated ****</echo> 
+               
+               <java jar="${basedir}/target/userjars/cadi-core-1.2.5.jar" 
+               fork="true" append="true" output="${basedir}/src/main/config/cadi.properties"> 
+               <arg value="digest" /> <arg value="ajscRocks!" /> <arg value="src/main/config/ajscKey" 
+               /> 
+               </java> 
+               
+-->
+
+
+
+               <!-- These tasks are copying contents from the installHomeDirectory into 
+                       the eventual $AJSC_HOME directory for running locally and soa cloud installation -->
+               <echo message="ENTERING 'prep_home_directory_for_swm_pkgcreate' ant tasks" />
+
+               <!-- Please, NOTE: The ajsc-archetype is setup for a default CSI Env deployment. 
+                       If you are deploying to a CSI Env, you should NOT have to change anything 
+                       within this build file. However, if you are NOT deploying to a CSI Env, you 
+                       should comment OUT the CSI related portion of this build.xml. -->
+
+               <!-- The following code snippet is copying the bundleconfig-csi directory 
+                       to the proper installation/bundleconfig directory used in CSI envs. If you 
+                       are NOT installing to a CSI node, you should comment out (or delete) the 
+                       following snippet, and uncomment the NON-CSI copy task to copy EVERYTHING 
+                       to the installation/bundleconfig directory. -->
+
+               <!-- CSI related bundleconfig copy task. If you are NOT deploying to a 
+                       CSI Env, please COMMENT OUT or delete the following copy task code snippet. -->
+               <!--<copy toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/bundleconfig" 
+                       failonerror="true"> <fileset dir="${basedir}/bundleconfig-csi" /> </copy> -->
+               <!-- End of CSI related bundleconfig copy task -->
+
+               <!-- NOTE: If you are NOT deploying to CSI environment, and you are NOT 
+                       using an AJSC_SHARED_CONFIG location on a node, you should go ahead and copy 
+                       EVERYTHING from bundleconfig and ajsc-shared-config (logback.xml) directory 
+                       to utilize proper logging from logback.xml. Simply, uncomment the following 
+                       code snippet below to copy EVERYTHING and comment out the CSI related build 
+                       script above. -->
+               <!-- NON-CSI related build copy task. Please, uncomment the following code 
+                       snippet to deploy the proper artifacts to a NON-CSI Env. -->
+               <copy
+                       toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/bundleconfig"
+                       failonerror="true">
+                       <fileset dir="${basedir}/bundleconfig-local" includes="**/**" />
+               </copy>
+               <copy
+                       toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/bundleconfig/etc"
+                       failonerror="true">
+                       <fileset dir="${basedir}/ajsc-shared-config/etc" includes="**/**" />
+               </copy>
+               <!-- End of NON-CSI related build copy task. -->
+
+               <!-- Copying any zips (deployment packages) to $AJSC_HOME/services for 
+                       auto-deployment -->
+               <copy
+                       toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/services"
+                       failonerror="false">
+                       <fileset dir="${basedir}/services" includes="*.zip" />
+               </copy>
+
+               <!-- Copying runtimeEnvironment zip file to $AJSC_HOME/runtime and renaming 
+                       runtimeEnvironment.zip for proper auto-deployment of ajsc services. 
+               <copy
+                       tofile="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/runtime/runtimeEnvironment.zip">
+                       <fileset dir="target" includes="*-runtimeEnvironment.zip" />
+               </copy>-->
+
+               <!-- Copying dependencies from the service project (not provided by AJSC 
+                       Container) to the $AJSC_HOME/extJars folder to be accessible on the classpath -->
+               <copy
+                       toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/extJars"
+                       failonerror="false">
+                       <fileset dir="target/userjars" includes="*" />
+               </copy>
+
+               <!-- extApps directory MUST be created for ajsc-runner to run correctly, 
+                       even if empty. DO NOT REMOVE!!! -->
+               <!-- extApps directory created to deploy other war files on startup or 
+                       hot deploy War files after ajsc starts up. -->
+               <mkdir
+                       dir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/extApps" />
+
+               <!-- Copying any extra wars to $AJSC_HOME/extApps to be deployed within 
+                       AJSC -->
+               <copy
+                       toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/extApps"
+                       failonerror="false">
+                       <fileset dir="${basedir}/src/main/resources/extApps"
+                               includes="*" />
+               </copy>
+
+               <!-- staticContent folder is for serving static content within an ajsc 
+                       service. Any static content to be served will be copied to the ultimate
+                       $AJSC_HOME/staticContent folder -->
+               <!-- Uncomment the following snippet to copy items from staticContent folder 
+                       to the ultimate $AJSC_HOME/staticContent -->
+               <!-- <copy toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/staticContent" 
+                       failonerror="false"> <fileset dir="${basedir}/staticContent" includes="**/**" 
+                       /> </copy> -->
+
+               <!-- Copying extra jar files that have been labeled as dependencies in 
+                       service project to /extJars folder to be made available on the classpath 
+                       for your service -->
+               <copy
+                       toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/extJars"
+                       failonerror="false">
+                       <fileset dir="target" includes="*.jar" />
+               </copy>
+
+               <!-- Copying deployment packages created within the project to the $AJSC_HOME/services 
+                       folder to be auto deployed. -->
+               <copy
+                       toDir="${basedir}/target/swm/package/nix/dist_files${distFilesRoot}/services">
+                       <fileset dir="target" includes="*.zip" excludes="*-runtimeEnvironment.zip" />
+               </copy>
+
+               <echo message="EXITING 'prep_assembly_output_for_swm_plugin' ant tasks" />
+       </target>
+</project>
diff --git a/bundleconfig-local/README.txt b/bundleconfig-local/README.txt
new file mode 100644 (file)
index 0000000..37f2670
--- /dev/null
@@ -0,0 +1,2 @@
+#Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+The bundleconfig-local directory contains the necessary configuration files 
\ No newline at end of file
diff --git a/bundleconfig-local/RELEASE_NOTES.txt b/bundleconfig-local/RELEASE_NOTES.txt
new file mode 100644 (file)
index 0000000..3cc5590
--- /dev/null
@@ -0,0 +1,2 @@
+#Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+Place Release Notes here to provide updated Release information 
\ No newline at end of file
diff --git a/bundleconfig-local/etc/appprops/PostProcessorInterceptors.properties b/bundleconfig-local/etc/appprops/PostProcessorInterceptors.properties
new file mode 100644 (file)
index 0000000..08ffefa
--- /dev/null
@@ -0,0 +1,3 @@
+#Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+#This properties file is for defining any PostProcessorInterceptors that have been created for your AJSC service.
+
diff --git a/bundleconfig-local/etc/appprops/PreProcessorInterceptors.properties b/bundleconfig-local/etc/appprops/PreProcessorInterceptors.properties
new file mode 100644 (file)
index 0000000..1383071
--- /dev/null
@@ -0,0 +1,4 @@
+#Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+#This properties file is for defining any PreProcessorInterceptors that have been created for your AJSC service. 
+
+/**=com.att.ajsc.csi.restmethodmap.RestMethodMapInterceptor
diff --git a/bundleconfig-local/etc/appprops/app-intercepts.properties b/bundleconfig-local/etc/appprops/app-intercepts.properties
new file mode 100644 (file)
index 0000000..4674a1e
--- /dev/null
@@ -0,0 +1,8 @@
+#Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+
+#This is where all your application intercept strategies must be configured. AJSC reads this property file and adds
+#the list of intercepts specified here to the camel context. This can be useful for accessing every exchange object transferred from/to
+#each endpoint in the request/response flow and can allow for more precise debugging and/or processing of the exchange. 
+
+#e.g. 
+#intercepts=org.openecomp.search-data-service.JaxrsEchoService,packagename.class1name,packagename.class2name
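
For reference, each class named in the "intercepts" property is an intercept strategy on the service classpath that AJSC hands to the camel context. A minimal sketch of such a class, assuming the Camel 2.x API used by the AJSC runtime (the package and class names here are illustrative, not part of this commit):

    package org.openecomp.searchdataservice;

    import org.apache.camel.CamelContext;
    import org.apache.camel.Processor;
    import org.apache.camel.model.ProcessorDefinition;
    import org.apache.camel.spi.InterceptStrategy;

    // Hypothetical intercept strategy; its fully qualified name would be
    // listed in the "intercepts" property above.
    public class LoggingInterceptStrategy implements InterceptStrategy {

      @Override
      public Processor wrapProcessorInInterceptors(CamelContext context,
          ProcessorDefinition<?> definition, Processor target, Processor nextTarget)
          throws Exception {
        // Wrap the target processor so every exchange flowing through the
        // route can be inspected before normal processing continues.
        return exchange -> {
          System.out.println("Intercepted exchange at node: " + definition.getId());
          target.process(exchange);
        };
      }
    }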
diff --git a/bundleconfig-local/etc/appprops/methodMapper.properties b/bundleconfig-local/etc/appprops/methodMapper.properties
new file mode 100644 (file)
index 0000000..061f0b0
--- /dev/null
@@ -0,0 +1,46 @@
+//
+//Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+//     JSON object holds the method mapping. Update the JSON object with the proper route-to-logical mapping based 
+//     on the example provided below: 
+//     "helloWorld"  = service name
+//     "method"   = HTTP method
+//     "url" = the URL component from the route
+//     "logicalName" = when a combination of method and URL from the route matches the JSON object, 
+//     the logical name is put in the HTTP headers "x-CSI-ServiceName" and "x-CSI-MethodName" 
+//     "dme2url" = optional; if provided, it registers the endpoint with GRM. This is useful for JAX-RS services.
+  
+{
+    "helloWorld": [
+        {
+            "method": "get",
+            "url": "/rest/search-data-service/v1/helloWorld",
+            "logicalName": "GetMethod(Logical)"
+        },
+        {
+            "method": "get",
+            "url": "/services/search-data-service/v1/jaxrsExample/jaxrs-services/echo/{input}",
+            "logicalName": "GetJaxrsExampleEcho(Logical)",
+            "dme2url": "/services/search-data-service/v1/jaxrsExample/jaxrs-services/echo/{input}"
+        },
+        {
+            "method": "get",
+            "url": "/services/search-data-service/v1/jaxrsExample/jaxrs-services/property/{fileName}/{input}",
+            "logicalName": "GetJaxrsExampleProperty(Logical)",
+            "dme2url": "/services/search-data-service/v1/jaxrsExample/jaxrs-services/property/{fileName}/{input}"
+        }
+    ],
+    "errormessage": [
+        {
+            "method": "get",
+            "url": "/services/search-data-service/v1/jaxrsExample/errormessage/emls",
+            "logicalName": "setCAETHeaders(Logical)"
+        },
+        {
+            "method": "get",
+            "url": "/services/search-data-service/v1/errorMessageLookupService2",
+            "logicalName": "setCAETHeaders(Logical)"
+        }
+    ]
+}
\ No newline at end of file
diff --git a/bundleconfig-local/etc/sysprops/sys-props.properties b/bundleconfig-local/etc/sysprops/sys-props.properties
new file mode 100644 (file)
index 0000000..a55e6ea
--- /dev/null
@@ -0,0 +1,116 @@
+#Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+#This file is used for defining AJSC system properties for different configuration schemes and is necessary for the AJSC to run properly.
+#The sys-props.properties file is used for running locally. The template.sys-props.properties file will be used when deployed
+#to a SOA/CSI Cloud node. 
+
+#AJSC System Properties. The following properties are required for ALL AJSC services. If you are adding System Properties for your
+#particular service, please add them AFTER all AJSC related System Properties. 
+
+#For Cadi Authorization, use value="authentication-scheme-1"
+CadiAuthN=authentication-scheme-1
+
+#For Basic Authorization, use value="authentication-scheme-2"
+authN=authentication-scheme-2
+
+#Persistence used for AJSC meta-data storage. For most environments, "file" should be used.
+ajscPersistence=file
+
+#For Direct Invocation to be enabled (values=true/false)
+directInvocationEnable=false
+
+# If using hawtio for local development, these properties will allow for faster server startup and usage
+
+hawtio.authenticationEnabled=false
+hawtio.config.pullOnStartup=false
+
+#Removes the extraneous restlet console output
+org.restlet.engine.loggerFacadeClass=org.restlet.ext.slf4j.Slf4jLoggerFacade
+
+#server.host property to be enabled for local DME2 related testing
+#server.host=<Your network IP address> 
+
+#Enable/disable SSL (values=true/false). This property also determines which protocol to use (https if true, http otherwise), to register services into GRM through DME2.
+enableSSL=false
+
+
+#Enable/disable EJB Container
+ENABLE_EJB=false
+
+#Enable/disable OSGI
+isOSGIEnable=false
+
+#Generate/Skip api docs
+isApiDoc=false
+
+#CSI related variables for CSM framework
+csm.hostname=servername
+
+
+#SOA_CLOUD_ENV is used to register your service with dme2 and can be turned off for local development (values=true/false).
+SOA_CLOUD_ENV=false
+
+#CONTINUE_ON_LISTENER_EXCEPTION: when set to false, the application will exit if there is a DME2 exception at the time of registration.
+CONTINUE_ON_LISTENER_EXCEPTION=false
+
+#Jetty Container ThreadCount Configuration Variables
+AJSC_JETTY_ThreadCount_MIN=1
+AJSC_JETTY_ThreadCount_MAX=200
+AJSC_JETTY_IDLETIME_MAX=3000
+
+#Camel Context level default threadPool Profile configuration
+CAMEL_POOL_SIZE=10
+CAMEL_MAX_POOL_SIZE=20
+CAMEL_KEEP_ALIVE_TIME=60
+CAMEL_MAX_QUEUE_SIZE=1000
+
+#GRM/DME2 System Properties
+AFT_DME2_CONN_IDLE_TIMEOUTMS=5000
+AJSC_ENV=SOACLOUD
+
+SOACLOUD_NAMESPACE=com.att.ajsc
+SOACLOUD_ENV_CONTEXT=DEV
+SOACLOUD_PROTOCOL=http
+SOACLOUD_ROUTE_OFFER=DEFAULT
+
+AFT_LATITUDE=23.4
+AFT_LONGITUDE=33.6
+AFT_ENVIRONMENT=AFTUAT
+
+#Restlet Component Default Properties
+RESTLET_COMPONENT_CONTROLLER_DAEMON=true
+RESTLET_COMPONENT_CONTROLLER_SLEEP_TIME_MS=100
+RESTLET_COMPONENT_INBOUND_BUFFER_SIZE=8192
+RESTLET_COMPONENT_MIN_THREADS=1
+RESTLET_COMPONENT_MAX_THREADS=10
+RESTLET_COMPONENT_LOW_THREADS=8
+RESTLET_COMPONENT_MAX_QUEUED=0
+RESTLET_COMPONENT_MAX_CONNECTIONS_PER_HOST=-1
+RESTLET_COMPONENT_MAX_TOTAL_CONNECTIONS=-1
+RESTLET_COMPONENT_OUTBOUND_BUFFER_SIZE=8192
+RESTLET_COMPONENT_PERSISTING_CONNECTIONS=true
+RESTLET_COMPONENT_PIPELINING_CONNECTIONS=false
+RESTLET_COMPONENT_THREAD_MAX_IDLE_TIME_MS=60000
+RESTLET_COMPONENT_USE_FORWARDED_HEADER=false
+RESTLET_COMPONENT_REUSE_ADDRESS=true
+
+#Externalized jar and properties file location. In CSI environments, a few libs have been externalized to aid
+#in CSTEM maintenance of the versions of these libs. The most important to the AJSC is the DME2 lib. Not only is this lib necessary
+#for proper registration of your AJSC service on a node, but it is also necessary for running locally. Another framework
+#used in CSI envs is the CSM framework. These 2 framework libs are shown as "provided" dependencies within the pom.xml. These
+#dependencies will be copied into the target/commonLibs folder with the normal "mvn clean package" goal of the AJSC. They will
+#then be added to the classpath via the AJSC_EXTERNAL_LIB_FOLDERS system property. Any files (mainly property files) that need
+#to be on the classpath should be added to the AJSC_EXTERNAL_PROPERTIES_FOLDERS system property. The default scenario when 
+#testing your AJSC service locally will utilize the target/commonLibs directory for DME2 and CSM related artifacts, and 2 
+#default csm properties files will be used for local testing with anything CSM related.
+#NOTE: we are using maven-replacer-plugin to replace "(doubleUnderscore)basedir(doubleUnderscore)" with ${basedir} within the 
+#target directory for running locally. Multiple folder locations can be separated by the pipe ("|") character.
+#PLEASE NOTE: for running locally, we set this system property in the antBuild/build.xml "runLocal" target and in the 
+#"runAjsc" profile within the pom.xml. This is to most effectively use maven variables (${basedir}, most specifically). Therefore,
+#when running locally, the following 2 properties should be set within the profile(s) themselves. 
+#Example: target/commonLibs|target/otherLibs
+#AJSC_EXTERNAL_LIB_FOLDERS=__basedir__/target/commonLibs
+#AJSC_EXTERNAL_PROPERTIES_FOLDERS=__basedir__/ajsc-shared-config/etc
+#End of AJSC System Properties
+
+#Service System Properties. Please, place any Service related System Properties below.
+
diff --git a/pom.xml b/pom.xml
new file mode 100644 (file)
index 0000000..6ebec57
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,369 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>ajsc-archetype-parent</artifactId>
+        <groupId>com.att.ajsc</groupId>
+        <version>2.0.0</version>
+    </parent>
+    <groupId>org.openecomp.aai</groupId>
+    <artifactId>search-data-service</artifactId>
+    <version>1.0.0</version>
+
+    <properties>
+        <module.ajsc.namespace.name>search-data-service</module.ajsc.namespace.name>
+        <module.ajsc.namespace.version>v1</module.ajsc.namespace.version>
+        <ajscRuntimeVersion>2.0.0</ajscRuntimeVersion>
+        <absoluteDistFilesRoot>/appl/${project.artifactId}</absoluteDistFilesRoot>
+
+        <!-- For NO Versioning, REMOVE the /${project.version} from the <distFilesRoot>
+            property, below. PLEASE, NOTE: If your ${project.version} is a "-SNAPSHOT"
+            version, THIS will be used as your directory structure. If you do NOT want
+            this, simply remove the "-SNAPSHOT" from your <version> declaration at the
+            top of pom.xml -->
+        <distFilesRoot>/appl/${project.artifactId}/${project.version}</distFilesRoot>
+        <runAjscHome>${basedir}/target/swm/package/nix/dist_files${distFilesRoot}</runAjscHome>
+
+        <!-- For SOA Cloud Installation -->
+        <installOwnerUser>aaiadmin</installOwnerUser>
+        <installOwnerGroup>aaiadmin</installOwnerGroup>
+        <ownerManagementGroup>com.att.csid.lab</ownerManagementGroup>
+
+        <!-- Port Selection. A value of 0 will allow for dynamic port selection.
+            For local testing, you may choose to hardcode this value to something like
+            8080 -->
+        <serverPort>8080</serverPort>
+        <sslport>9509</sslport>
+
+        <testRouteOffer>workstation</testRouteOffer>
+        <testEnv>DEV</testEnv>
+        <generatedSourceDir>${basedir}/src/main/java-gen</generatedSourceDir>
+        <checkstyle.config.location>google_checks.xml</checkstyle.config.location>
+    </properties>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>com.googlecode.json-simple</groupId>
+            <artifactId>json-simple</artifactId>
+            <version>1.1.1</version>
+        </dependency>
+        <dependency>
+            <groupId>dom4j</groupId>
+            <artifactId>dom4j</artifactId>
+            <version>1.6.1</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>com.att.aft</groupId>
+            <artifactId>dme2</artifactId>
+            <version>3.1.200</version>
+            <scope>provided</scope>
+        </dependency>
+
+
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>2.3.1</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-io</groupId>
+            <artifactId>commons-io</artifactId>
+            <version>2.4</version>
+        </dependency>
+
+
+        <!-- Common logging framework -->
+        <dependency>
+          <groupId>org.openecomp.aai.logging-service</groupId>
+          <artifactId>common-logging</artifactId>
+          <version>1.0.0-SNAPSHOT</version>
+        </dependency>
+
+        <!--  Jersey Test Framework. -->
+        <dependency>
+            <groupId>org.glassfish.jersey.test-framework.providers</groupId>
+            <artifactId>jersey-test-framework-provider-grizzly2</artifactId>
+            <version>2.23.2</version>
+            <scope>test</scope>
+        </dependency>
+
+        <!-- For JSON Mapping Support. -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>2.7.8</version>
+        </dependency>
+
+    </dependencies>
+
+    <profiles>
+        <profile>
+            <id>runAjsc</id>
+            <build>
+                <defaultGoal>initialize</defaultGoal>
+                <plugins>
+                    <plugin>
+                        <groupId>org.codehaus.mojo</groupId>
+                        <artifactId>exec-maven-plugin</artifactId>
+                        <version>1.3.2</version>
+                        <executions>
+                            <execution>
+                                <phase>initialize</phase>
+                                <goals>
+                                    <goal>java</goal>
+                                </goals>
+                                <configuration>
+                                    <includeProjectDependencies>false</includeProjectDependencies>
+                                    <includePluginDependencies>true</includePluginDependencies>
+                                    <executable>java</executable>
+                                    <mainClass>com.att.ajsc.runner.Runner</mainClass>
+                                    <executableDependency>
+                                        <groupId>com.att.ajsc</groupId>
+                                        <artifactId>ajsc-runner</artifactId>
+                                    </executableDependency>
+                                    <additionalClasspathElements>
+                                        <additionalClasspathElement>${basedir}/ajsc-shared-config/etc
+                                        </additionalClasspathElement>
+                                    </additionalClasspathElements>
+
+                                    <environmentVariables>
+                                        <AJSC_HOME>${runAjscHome}</AJSC_HOME>
+                                    </environmentVariables>
+
+                                    <!-- Main AJSC System Properties below (necessary for proper startup) -->
+                                    <systemProperties>
+                                        <systemProperty>
+                                            <key>AJSC_HOME</key>
+                                            <value>${runAjscHome}</value>
+                                        </systemProperty>
+                                        <systemProperty>
+                                            <key>CONFIG_HOME</key>
+                                            <value>${basedir}/appconfig-local/</value>
+                                        </systemProperty>
+                                        <systemProperty>
+                                            <key>AJSC_CONF_HOME</key>
+                                            <value>${basedir}/bundleconfig-local</value>
+                                        </systemProperty>
+                                        <systemProperty>
+                                            <key>logback.configurationFile</key>
+                                            <value>${basedir}/ajsc-shared-config/etc/logback.xml</value>
+                                        </systemProperty>
+                                        <systemProperty>
+                                            <key>AJSC_SHARED_CONFIG</key>
+                                            <value>${basedir}/ajsc-shared-config</value>
+                                        </systemProperty>
+
+                                        <systemProperty>
+                                            <key>AJSC_EXTERNAL_LIB_FOLDERS</key>
+                                            <value>${basedir}/target/commonLibs</value>
+                                        </systemProperty>
+                                        <systemProperty>
+                                            <key>AJSC_EXTERNAL_PROPERTIES_FOLDERS</key>
+                                            <value>${basedir}/ajsc-shared-config/etc</value>
+                                        </systemProperty>
+
+                                        <systemProperty>
+                                            <key>AJSC_SERVICE_NAMESPACE</key>
+                                            <value>${module.ajsc.namespace.name}</value>
+                                        </systemProperty>
+                                        <systemProperty>
+                                            <key>AJSC_SERVICE_VERSION</key>
+                                            <value>${module.ajsc.namespace.version}</value>
+                                        </systemProperty>
+                                        <systemProperty>
+                                            <key>SOACLOUD_SERVICE_VERSION</key>
+                                            <value>${project.version}</value>
+                                        </systemProperty>
+                                        <systemProperty>
+                                            <key>server.port</key>
+                                            <value>${serverPort}</value>
+                                        </systemProperty>
+                                    </systemProperties>
+
+                                    <!-- Command Line Arguments to add to the java command. Here, you
+                                        can specify the port as well as the Context you want your service to run
+                                        in. Use context=/ to run in an unnamed Context (Root Context). The default
+                                        configuration of the AJSC is to run under the / Context. Setting the port
+                                        here can aid during the development phase of your service. However, you can
+                                        leave this argument out entirely, and the AJSC will default to using an Ephemeral
+                                        port. -->
+                                    <arguments>
+                                        <argument>context=//</argument>
+                                        <argument>port=${serverPort}</argument>
+                                        <argument>sslport=${sslport}</argument>
+                                    </arguments>
+                                </configuration>
+                            </execution>
+                        </executions>
+                        <configuration>
+                            <executable>java</executable>
+                        </configuration>
+                        <dependencies>
+                            <dependency>
+                                <groupId>com.att.ajsc</groupId>
+                                <artifactId>ajsc-runner</artifactId>
+                                <version>${ajscRuntimeVersion}</version>
+                            </dependency>
+                        </dependencies>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>ch.qos.logback</groupId>
+                <artifactId>logback-classic</artifactId>
+                <version>1.1.1</version>
+            </dependency>
+            <dependency>
+                <groupId>com.fasterxml.jackson.core</groupId>
+                <artifactId>jackson-core</artifactId>
+                <version>2.7.8</version>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <build>
+        <plugins>
+            <!-- Checkstyle plugin - used to report on compliance with -->
+            <!-- the Google style guide. -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-site-plugin</artifactId>
+                <version>3.3</version>
+                <configuration>
+                    <reportPlugins>
+                        <plugin>
+                            <groupId>org.apache.maven.plugins</groupId>
+                            <artifactId>maven-checkstyle-plugin</artifactId>
+                            <version>2.17</version>
+                            <reportSets>
+                                <reportSet>
+                                    <reports>
+                                        <report>checkstyle</report>
+                                    </reports>
+                                </reportSet>
+                            </reportSets>
+                        </plugin>
+                    </reportPlugins>
+                </configuration>
+            </plugin>
+            <!-- This plugin overrides the compiler settings to use java 1.8 -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.1</version>
+                <configuration>
+                    <compilerId>groovy-eclipse-compiler</compilerId>
+                    <verbose>true</verbose>
+                    <source>1.8</source>
+                    <target>1.8</target>
+                </configuration>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.codehaus.groovy</groupId>
+                        <artifactId>groovy-eclipse-compiler</artifactId>
+                        <version>2.9.0-01</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.codehaus.groovy</groupId>
+                        <artifactId>groovy-eclipse-batch</artifactId>
+                        <version>2.3.4-01</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+
+            <!-- This plugin is used to generate Java POJO's from json format schema
+                file. -->
+            <plugin>
+                <groupId>org.jsonschema2pojo</groupId>
+                <artifactId>jsonschema2pojo-maven-plugin</artifactId>
+                <version>0.4.26</version>
+                <configuration>
+                    <addCompileSourceRoot>true</addCompileSourceRoot>
+                    <sourceDirectory>${basedir}/src/main/resources/json/schema</sourceDirectory>
+                    <outputDirectory>${generatedSourceDir}</outputDirectory>
+                    <targetPackage>org.openecomp.sa.rest</targetPackage>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>generate</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-resources-plugin</artifactId>
+                <version>2.7</version>
+                <executions>
+                    <execution>
+                        <id>copy-docker-file</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>copy-resources</goal>
+                        </goals>
+                        <configuration>
+                            <outputDirectory>target</outputDirectory>
+                            <overwrite>true</overwrite>
+                            <resources>
+                                <resource>
+                                    <directory>${basedir}/src/main/docker</directory>
+                                    <filtering>true</filtering>
+                                    <includes>
+                                        <include>**/*</include>
+                                    </includes>
+                                </resource>
+                                <resource>
+                                    <directory>${basedir}/src/main/bin/</directory>
+                                </resource>
+                            </resources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <!-- license plugin -->
+            <plugin>
+                <groupId>com.mycila</groupId>
+                <artifactId>license-maven-plugin</artifactId>
+                <version>3.0</version>
+                <configuration>
+                    <header>License.txt</header>
+                    <includes>
+                        <include>src/main/java/**</include>
+                        <include>src/test/java/**</include>
+                    </includes>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>format</goal>
+                        </goals>
+                        <phase>process-sources</phase>
+                    </execution>
+                </executions>
+            </plugin>
+            <!-- This plugin adds the generated sources directory to the clean lifecycle
+                so that automatically generated code will get cleaned up properly. -->
+            <plugin>
+                <artifactId>maven-clean-plugin</artifactId>
+                <version>3.0.0</version>
+                <configuration>
+                    <filesets>
+                        <fileset>
+                            <directory>${generatedSourceDir}</directory>
+                        </fileset>
+                    </filesets>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
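
With the profiles and plugins above, the conventional AJSC 2.x workflow is to run "mvn clean package" once to stage the runtime under target/swm/package/nix/dist_files, then "mvn -P runAjsc" to launch the service locally through the exec-maven-plugin (the profile's defaultGoal of initialize means no explicit goal is needed). Exact invocations may vary by environment; this reflects standard archetype usage rather than anything mandated by this commit.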
diff --git a/services/README.txt b/services/README.txt
new file mode 100644 (file)
index 0000000..c20898d
--- /dev/null
@@ -0,0 +1,8 @@
+Place any Deployment Packages (zips) in this folder to be deployed with your service.
+This can be used for importing older Nimbus deployment packages into the AJSC. Not all
+prior Nimbus services are available, and therefore not ALL former Nimbus deployment
+packages will convert directly to AJSC. However, for service development, you may create
+other services (deployment packages) as separate services and test them by themselves. Then
+simply take the created zip (deployment package) of the service and place it in THIS services
+folder to be deployed within this AJSC container. This folder will be copied to the ultimate
+AJSC_HOME/services folder, from which all services are deployed.
\ No newline at end of file
diff --git a/src/main/ajsc/search-data-service_v1/search-data-service/v1/conf/jaxrsBeans.groovy b/src/main/ajsc/search-data-service_v1/search-data-service/v1/conf/jaxrsBeans.groovy
new file mode 100644 (file)
index 0000000..24ec0da
--- /dev/null
@@ -0,0 +1,14 @@
+beans {
+    xmlns cxf: "http://camel.apache.org/schema/cxf"
+    xmlns jaxrs: "http://cxf.apache.org/jaxrs"
+    xmlns util: "http://www.springframework.org/schema/util"
+
+    echoService(org.openecomp.sa.searchdbabstraction.JaxrsEchoService)
+    userService(org.openecomp.sa.searchdbabstraction.JaxrsUserService)
+    searchService(org.openecomp.sa.searchdbabstraction.service.SearchService)
+
+    util.list(id: 'jaxrsServices') {
+        ref(bean: 'echoService')
+        ref(bean: 'userService')
+    }
+}
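
The beans registered here are plain JAX-RS annotated classes that the cxfbean endpoint in jaxrsExample.route dispatches to. As a rough illustration of the shape such a bean takes (the real JaxrsEchoService is defined elsewhere in this commit; this sketch only mirrors the echo URL seen in methodMapper.properties and is not the actual implementation):

    package org.openecomp.sa.searchdbabstraction;

    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.PathParam;
    import javax.ws.rs.Produces;

    // Hypothetical minimal echo service resolving
    // .../jaxrsExample/jaxrs-services/echo/{input}
    @Path("/jaxrs-services")
    public class JaxrsEchoService {

      @GET
      @Path("/echo/{input}")
      @Produces("text/plain")
      public String echo(@PathParam("input") String input) {
        return input;
      }
    }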
diff --git a/src/main/ajsc/search-data-service_v1/search-data-service/v1/conf/searchBeans.groovy b/src/main/ajsc/search-data-service_v1/search-data-service/v1/conf/searchBeans.groovy
new file mode 100644 (file)
index 0000000..dddedfa
--- /dev/null
@@ -0,0 +1,13 @@
+import org.openecomp.sa.rest.SearchServiceApi
+
+beans {
+    xmlns cxf: "http://camel.apache.org/schema/cxf"
+    xmlns jaxrs: "http://cxf.apache.org/jaxrs"
+    xmlns util: "http://www.springframework.org/schema/util"
+
+    searchServiceAPI(SearchServiceApi)
+
+    util.list(id: 'searchServices') {
+        ref(bean: 'searchServiceAPI')
+    }
+}
\ No newline at end of file
diff --git a/src/main/ajsc/search-data-service_v1/search-data-service/v1/docs/README.txt b/src/main/ajsc/search-data-service_v1/search-data-service/v1/docs/README.txt
new file mode 100644 (file)
index 0000000..3707179
--- /dev/null
@@ -0,0 +1 @@
+Place any docs here that you want to access within the AJSC upon deployment of your service.
diff --git a/src/main/ajsc/search-data-service_v1/search-data-service/v1/lib/README.txt b/src/main/ajsc/search-data-service_v1/search-data-service/v1/lib/README.txt
new file mode 100644 (file)
index 0000000..639e21b
--- /dev/null
@@ -0,0 +1 @@
+3rd party JARs needed by your jars (if any) for an AJSC deployment package go here...
\ No newline at end of file
diff --git a/src/main/ajsc/search-data-service_v1/search-data-service/v1/props/module.props b/src/main/ajsc/search-data-service_v1/search-data-service/v1/props/module.props
new file mode 100644 (file)
index 0000000..17ebc08
--- /dev/null
@@ -0,0 +1 @@
+EXAMPLE.PROPERTY=EXAMPLE_VALUE
\ No newline at end of file
diff --git a/src/main/ajsc/search-data-service_v1/search-data-service/v1/routes/errorMessage.route b/src/main/ajsc/search-data-service_v1/search-data-service/v1/routes/errorMessage.route
new file mode 100644 (file)
index 0000000..7c08576
--- /dev/null
@@ -0,0 +1,4 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+    <from uri="servlet:/__module_ajsc_namespace_name__/__module_ajsc_namespace_version__/errorMessageLookupService2"/>
+    <to uri="bean:errorMessageLookupService?method=getExceptionDetails"/>
+</route>
\ No newline at end of file
diff --git a/src/main/ajsc/search-data-service_v1/search-data-service/v1/routes/jaxrsExample.route b/src/main/ajsc/search-data-service_v1/search-data-service/v1/routes/jaxrsExample.route
new file mode 100644 (file)
index 0000000..367812e
--- /dev/null
@@ -0,0 +1,4 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+  <from uri="servlet:///__module_ajsc_namespace_name__/__module_ajsc_namespace_version__/jaxrsExample?matchOnUriPrefix=true" />
+  <to uri="cxfbean:jaxrsServices" />
+</route>
\ No newline at end of file
diff --git a/src/main/ajsc/search-data-service_v1/search-data-service/v1/routes/searchEngine.route b/src/main/ajsc/search-data-service_v1/search-data-service/v1/routes/searchEngine.route
new file mode 100644 (file)
index 0000000..ef8e950
--- /dev/null
@@ -0,0 +1,4 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+  <from uri="servlet:///__module_ajsc_namespace_name__/__module_ajsc_namespace_version__/search?matchOnUriPrefix=true" />
+  <to uri="cxfbean:searchServices" />
+</route>
\ No newline at end of file
diff --git a/src/main/assemble/ajsc_module_assembly.xml b/src/main/assemble/ajsc_module_assembly.xml
new file mode 100644 (file)
index 0000000..8b5a5ec
--- /dev/null
@@ -0,0 +1,69 @@
+<!-- 
+ Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+-->
+<assembly
+        xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+    <id>${version}</id>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <formats>
+        <format>zip</format>
+    </formats>
+    <fileSets>
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-ajsc/routes/</directory>
+            <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/routes/</outputDirectory>
+            <includes>
+                <include>*.route</include>
+            </includes>
+
+        </fileSet>
+
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-ajsc/docs/</directory>
+            <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/docs/</outputDirectory>
+            <includes>
+                <include>*.*</include>
+                <!-- <include>*.vm</include>  -->
+            </includes>
+
+        </fileSet>
+
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-ajsc/lib/</directory>
+            <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/lib/</outputDirectory>
+            <includes>
+                <include>*.jar</include>
+            </includes>
+
+        </fileSet>
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-ajsc/extJars/</directory>
+            <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/extJars/</outputDirectory>
+            <includes>
+                <include>*.jar</include>
+            </includes>
+        </fileSet>
+
+        <!-- also try to grab outputs from the "jar" plugin's package phase -->
+        <fileSet>
+            <directory>${project.basedir}/target/</directory>
+            <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/lib/</outputDirectory>
+            <includes>
+                <include>*.jar</include>
+            </includes>
+        </fileSet>
+
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-ajsc/conf/</directory>
+            <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/conf/</outputDirectory>
+            <includes>
+                <include>*.*</include>
+            </includes>
+
+        </fileSet>
+    </fileSets>
+
+</assembly>
+
diff --git a/src/main/assemble/ajsc_props_assembly.xml b/src/main/assemble/ajsc_props_assembly.xml
new file mode 100644 (file)
index 0000000..5e4bcba
--- /dev/null
@@ -0,0 +1,26 @@
+<!-- 
+ Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+-->
+<assembly
+        xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+    <id>${version}_properties</id>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <formats>
+        <format>zip</format>
+    </formats>
+    <fileSets>
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-ajsc/props</directory>
+            <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/props/</outputDirectory>
+            <includes>
+                <include>*.props</include>
+            </includes>
+
+        </fileSet>
+
+    </fileSets>
+
+</assembly>
+
diff --git a/src/main/assemble/ajsc_runtime_assembly.xml b/src/main/assemble/ajsc_runtime_assembly.xml
new file mode 100644 (file)
index 0000000..f280fb2
--- /dev/null
@@ -0,0 +1,50 @@
+<!-- 
+ Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+-->
+<assembly
+        xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+    <id>runtimeEnvironment</id>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <formats>
+        <format>zip</format>
+    </formats>
+    <fileSets>
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-runtime/context/</directory>
+            <outputDirectory>runtime/context/</outputDirectory>
+            <includes>
+                <include>*.context</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-runtime/serviceProperties/</directory>
+            <outputDirectory>runtime/serviceProperties/</outputDirectory>
+            <includes>
+                <include>*.props</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-runtime/shiroRole</directory>
+            <outputDirectory>runtime/shiroRole/</outputDirectory>
+            <includes>
+                <include>*.json</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-runtime/shiroUser</directory>
+            <outputDirectory>runtime/shiroUser/</outputDirectory>
+            <includes>
+                <include>*.json</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>${project.basedir}/target/versioned-runtime/shiroUserRole</directory>
+            <outputDirectory>runtime/shiroUserRole</outputDirectory>
+            <includes>
+                <include>*.json</include>
+            </includes>
+        </fileSet>
+    </fileSets>
+</assembly>
\ No newline at end of file
diff --git a/src/main/bin/start.sh b/src/main/bin/start.sh
new file mode 100644 (file)
index 0000000..ea3f8bd
--- /dev/null
@@ -0,0 +1,40 @@
+#!/bin/sh
+
+BASEDIR="/opt/app/search-data-service/"
+AJSC_HOME="$BASEDIR"
+AJSC_CONF_HOME="$BASEDIR/bundleconfig/"
+
+if [ -z "$CONFIG_HOME" ]; then
+       echo "CONFIG_HOME must be set in order to start up process"
+       exit 1
+fi
+
+if [ -z "$KEY_STORE_PASSWORD" ]; then
+       echo "KEY_STORE_PASSWORD must be set in order to start up process"
+       exit 1
+else
+       echo "KEY_STORE_PASSWORD=$KEY_STORE_PASSWORD" >> $AJSC_CONF_HOME/etc/sysprops/sys-props.properties
+fi
+
+if [ -z "$KEY_MANAGER_PASSWORD" ]; then
+       echo "KEY_MANAGER_PASSWORD must be set in order to start up process"
+       exit 1
+else
+       echo "KEY_MANAGER_PASSWORD=$KEY_MANAGER_PASSWORD" >> $AJSC_CONF_HOME/etc/sysprops/sys-props.properties
+fi
+
+CLASSPATH="$AJSC_HOME/lib/*"
+CLASSPATH="$CLASSPATH:$AJSC_HOME/extJars/"
+CLASSPATH="$CLASSPATH:$AJSC_HOME/etc/"
+PROPS="-DAJSC_HOME=$AJSC_HOME"
+PROPS="$PROPS -DAJSC_CONF_HOME=$BASEDIR/bundleconfig/"
+PROPS="$PROPS -Dlogback.configurationFile=$BASEDIR/bundleconfig/etc/logback.xml"
+PROPS="$PROPS -DAJSC_SHARED_CONFIG=$AJSC_CONF_HOME"
+PROPS="$PROPS -DAJSC_SERVICE_NAMESPACE=search-data-service"
+PROPS="$PROPS -DAJSC_SERVICE_VERSION=v1"
+PROPS="$PROPS -Dserver.port=9509"
+PROPS="$PROPS -DCONFIG_HOME=$CONFIG_HOME"
+
+echo $CLASSPATH
+
+java -Xms1024m -Xmx4096m $PROPS -classpath $CLASSPATH com.att.ajsc.runner.Runner context=// sslport=9509
diff --git a/src/main/config/ajsc-chef.jks b/src/main/config/ajsc-chef.jks
new file mode 100644 (file)
index 0000000..aeca770
Binary files /dev/null and b/src/main/config/ajsc-chef.jks differ
diff --git a/src/main/config/ajsc-jetty.xml b/src/main/config/ajsc-jetty.xml
new file mode 100644 (file)
index 0000000..6f1821f
--- /dev/null
@@ -0,0 +1,133 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
+<Configure id="ajsc-server" class="org.eclipse.jetty.server.Server">
+    <!-- DO NOT REMOVE!!!! This is setting up the AJSC Context -->
+    <New id="ajscContext" class="org.eclipse.jetty.webapp.WebAppContext">
+        <Set name="contextPath">
+            <SystemProperty name="AJSC_CONTEXT_PATH"/>
+        </Set>
+        <Set name="extractWAR">true</Set>
+        <Set name="tempDirectory">
+            <SystemProperty name="AJSC_TEMP_DIR"/>
+        </Set>
+        <Set name="war">
+            <SystemProperty name="AJSC_WAR_PATH"/>
+        </Set>
+        <Set name="descriptor"><SystemProperty name="AJSC_HOME"/>/etc/runner-web.xml
+        </Set>
+        <Set name="overrideDescriptor"><SystemProperty name="AJSC_HOME"/>/etc/ajsc-override-web.xml
+        </Set>
+        <Set name="throwUnavailableOnStartupException">true</Set>
+        <Set name="servletHandler">
+            <New class="org.eclipse.jetty.servlet.ServletHandler">
+                <Set name="startWithUnavailable">false</Set>
+            </New>
+        </Set>
+    </New>
+
+    <Set name="handler">
+        <New id="Contexts" class="org.eclipse.jetty.server.handler.ContextHandlerCollection">
+            <Set name="Handlers">
+                <Array type="org.eclipse.jetty.webapp.WebAppContext">
+                    <Item>
+                        <Ref refid="ajscContext"/>
+                    </Item>
+                </Array>
+            </Set>
+        </New>
+    </Set>
+
+    <Call name="addBean">
+        <Arg>
+            <New id="DeploymentManager" class="org.eclipse.jetty.deploy.DeploymentManager">
+                <Set name="contexts">
+                    <Ref refid="Contexts"/>
+                </Set>
+                <Call id="extAppHotDeployProvider" name="addAppProvider">
+                    <Arg>
+                        <New class="org.eclipse.jetty.deploy.providers.WebAppProvider">
+                            <Set name="monitoredDirName"><SystemProperty name="AJSC_HOME"/>/extApps
+                            </Set>
+                            <Set name="scanInterval">10</Set>
+                            <Set name="extractWars">true</Set>
+                        </New>
+                    </Arg>
+                </Call>
+            </New>
+        </Arg>
+    </Call>
+
+    <New id="sslContextFactory" class="org.eclipse.jetty.util.ssl.SslContextFactory">
+        <Set name="keyStorePath">file:<SystemProperty name="CONFIG_HOME"/>/auth/tomcat_keystore
+        </Set>
+        <Set name="KeyStorePassword">
+            <Call class="org.eclipse.jetty.util.security.Password" name="deobfuscate">
+                <Arg>
+                    <SystemProperty name="KEY_STORE_PASSWORD"/>
+                </Arg>
+            </Call>
+        </Set>
+        <Set name="KeyManagerPassword">
+            <Call class="org.eclipse.jetty.util.security.Password" name="deobfuscate">
+                <Arg>
+                    <SystemProperty name="KEY_MANAGER_PASSWORD"/>
+                </Arg>
+            </Call>
+        </Set>
+        <Set name="needClientAuth">true</Set>
+        <Set name="wantClientAuth">true</Set>
+    </New>
+
+    <Call id="sslConnector" name="addConnector">
+        <Arg>
+            <New class="org.eclipse.jetty.server.ServerConnector">
+                <Arg name="server">
+                    <Ref refid="ajsc-server"/>
+                </Arg>
+                <Arg name="factories">
+                    <Array type="org.eclipse.jetty.server.ConnectionFactory">
+                        <Item>
+                            <New class="org.eclipse.jetty.server.SslConnectionFactory">
+                                <Arg name="next">http/1.1</Arg>
+                                <Arg name="sslContextFactory">
+                                    <Ref refid="sslContextFactory"/>
+                                </Arg>
+                            </New>
+                        </Item>
+                        <Item>
+                            <New class="org.eclipse.jetty.server.HttpConnectionFactory">
+                                <Arg name="config">
+                                    <New class="org.eclipse.jetty.server.HttpConfiguration">
+                                        <Call name="addCustomizer">
+                                            <Arg>
+                                                <New class="org.eclipse.jetty.server.SecureRequestCustomizer"/>
+                                            </Arg>
+                                        </Call>
+                                    </New>
+                                </Arg>
+                            </New>
+                        </Item>
+                    </Array>
+                </Arg>
+                <Set name="port">
+                    <SystemProperty name="AJSC_HTTPS_PORT" default="9509"/>
+                </Set>
+                <Set name="idleTimeout">30000</Set>
+            </New>
+        </Arg>
+    </Call>
+
+    <Get name="ThreadPool">
+        <Set name="minThreads">
+            <SystemProperty name="AJSC_JETTY_ThreadCount_MIN"/>
+        </Set>
+        <Set name="maxThreads">
+            <SystemProperty name="AJSC_JETTY_ThreadCount_MAX"/>
+        </Set>
+        <Set name="idleTimeout">
+            <SystemProperty name="AJSC_JETTY_IDLETIME_MAX"/>
+        </Set>
+        <Set name="detailedDump">false</Set>
+    </Get>
+
+</Configure>
\ No newline at end of file
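
Note that the KEY_STORE_PASSWORD and KEY_MANAGER_PASSWORD system properties are passed through org.eclipse.jetty.util.security.Password.deobfuscate, so they must carry Jetty-obfuscated (OBF:) values rather than clear text. A small sketch of the round trip, assuming jetty-util is on the classpath (the class name is illustrative):

    import org.eclipse.jetty.util.security.Password;

    public class ObfuscationDemo {
      public static void main(String[] args) {
        // Produce the OBF: form expected by the SystemProperty lookups above.
        String obf = Password.obfuscate("changeit");
        System.out.println(obf);                       // e.g. OBF:1vn2...
        System.out.println(Password.deobfuscate(obf)); // prints "changeit"
      }
    }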
diff --git a/src/main/config/ajsc-override-web.xml b/src/main/config/ajsc-override-web.xml
new file mode 100644 (file)
index 0000000..1bffe69
--- /dev/null
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!-- 
+ Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+-->
+<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
+         metadata-complete="false" version="3.0">
+
+    <filter-mapping>
+        <filter-name>InterceptorFilter</filter-name>
+        <url-pattern>/services/*</url-pattern>
+    </filter-mapping>
+    <filter-mapping>
+        <filter-name>InterceptorFilter</filter-name>
+        <url-pattern>/rest/*</url-pattern>
+    </filter-mapping>
+
+    <filter-mapping>
+        <filter-name>springSecurityFilterChain</filter-name>
+        <url-pattern>/*</url-pattern>
+    </filter-mapping>
+
+    <servlet-mapping>
+        <servlet-name>ManagementServlet</servlet-name>
+        <url-pattern>/mgmt</url-pattern>
+    </servlet-mapping>
+
+    <servlet-mapping>
+        <servlet-name>RestletServlet</servlet-name>
+        <url-pattern>/rest/*</url-pattern>
+    </servlet-mapping>
+
+    <servlet-mapping>
+        <servlet-name>CamelServlet</servlet-name>
+        <url-pattern>/services/*</url-pattern>
+    </servlet-mapping>
+
+    <servlet-mapping>
+        <servlet-name>jsp</servlet-name>
+        <url-pattern>*.jsp</url-pattern>
+        <url-pattern>*.jspf</url-pattern>
+        <url-pattern>*.jspx</url-pattern>
+        <url-pattern>*.xsp</url-pattern>
+        <url-pattern>*.JSP</url-pattern>
+        <url-pattern>*.JSPF</url-pattern>
+        <url-pattern>*.JSPX</url-pattern>
+        <url-pattern>*.XSP</url-pattern>
+    </servlet-mapping>
+    <servlet-mapping>
+        <servlet-name>default</servlet-name>
+        <url-pattern>/*</url-pattern>
+    </servlet-mapping>
+</web-app>
\ No newline at end of file
diff --git a/src/main/config/ajscJetty.jks b/src/main/config/ajscJetty.jks
new file mode 100644 (file)
index 0000000..48cdbff
Binary files /dev/null and b/src/main/config/ajscJetty.jks differ
diff --git a/src/main/config/jul-redirect.properties b/src/main/config/jul-redirect.properties
new file mode 100644 (file)
index 0000000..c3b949a
--- /dev/null
@@ -0,0 +1,11 @@
+#      Bridge JUL->slf4j Logging Configuration File
+#
+# This file bridges the JUL logging infrastructure into
+# SLF4J so that JUL logs go to the logback implementation provided
+# in this project. SLF4J also captures log4j and supports
+# other frameworks as well, providing a common
+# logging infrastructure for capturing all logs from different
+# libraries using different frameworks in one place.
+#      Global properties
+handlers=org.slf4j.bridge.SLF4JBridgeHandler
+.level=ALL
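
This handler assignment is the declarative equivalent of installing the bridge programmatically at startup. A minimal sketch, assuming the org.slf4j:jul-to-slf4j artifact is on the classpath (the class name is illustrative):

    import org.slf4j.bridge.SLF4JBridgeHandler;

    public class JulBridgeSetup {
      public static void main(String[] args) {
        // Remove JUL's default console handler, then route all JUL
        // records through SLF4J to the logback backend.
        SLF4JBridgeHandler.removeHandlersForRootLogger();
        SLF4JBridgeHandler.install();
        java.util.logging.Logger.getLogger("demo").info("now captured by logback");
      }
    }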
diff --git a/src/main/config/keyfile b/src/main/config/keyfile
new file mode 100644 (file)
index 0000000..882e86a
--- /dev/null
@@ -0,0 +1,27 @@
+ZuIwp0TkyVPDeX1Up-8JtkMWvjsCpoiu1_VKeWrtrvxunvAke8_tiFyHPPyb2nkhepFYj6tXzpfS
+rGz5XF_TH9NbsKaP8u0HV5clz2WriYQRvHS85vjY7hXxkpFuLb7zkLAPqTyIDpj7FiW61NzsRUAq
+TM8jH16jr7mBNnb56w24mNGOwznMPcIZKcjgZU1ekaPDFpWyhQElU7Y0q_94P_Gkk45r66Hj22sU
+OiOaaftmudZlswLw8-8Zaakqf2yW9HjMVfuYCwSodBHCW5rdB3Ctb5W36rnD_AQco3Ky2PgPmqvk
+QkJYuUHpbuDqVHqLOajlKSIGMTIqAIBg51fRaaONtD-Q5xzY8E5wO1YWTLKcP5tsNvUpzM8Wu3NS
+ynpGpUcvlTqWWsGzTbzOyamyKkdNdx97sSqjM25Zh1-ps48h6cddGYWpab7SUvqRCS11QBUyLTry
+2iwTEHMhHRIbo7PO99ALQfuq9gI1zKGfurJdvLBeBaFs5SCF0AiCZ3WcDO8Rv3HpxVZ2_ShbDxb0
+eMoO6SotXu51fj8Y3-WqsfZziQyEsHyqpg5uQ6yUtz01h5YHLEoVuotF1U4agmQR6kEkYk-wNOiZ
+v-8gaA9gtbLoAdKhuKFxQgQLNMf6GzVzZNujbmDzLoZAP_mXAv29aBPaf64Ugzv-Oa5GZdBgD-Xd
+_pahML-ionw99r0TnkpShYmDqMKhMdjaP3m87WIAZkIB-L-VTyKcEsJ4340VSzCOsv3waiM0S89u
+4cMcG5y-PLY8IoipIlLUPTWD3SjcQ9DV1Dt3T5KjdWLsj48D3W4K4e9PB8yxs0gtUjgVUR2_xEir
+G5eDO9Ac1eHFWGDFFP0SgG-TbHJUKlvy9mwLzmU0fC3xPjhqmIr-v0HxF7HN-tmb1LHDorno8tSN
+u7kUGcKSchIiFfvkd066crUb2mH7PnXTaWmAjyVj9VsBExFUYEdpHMAV4sAP9-RxZGDRt46UhrDK
+QZvvNhBVyOEjHPHWI4vl1r1v8HNH1_2jZu5DVJWyHWR56aCo1lhFH9_X6UAHUHbnXViDONZOVXlT
+9-WD0tk2zJGuwrhdZDAnPnAmjfwbwbpnr5Hmex1i1JiD7WVyP1kbfoej2TmdiYbxr9oBYaGQ29JI
+aHod7MQCLtvL1z5XgnDPLZ4y3_9SbqHKYbNa8UgZkTLF5EacGThYVFDLA9cbafHDtR1kMGE3vv4D
+EJ-0pAYTOGmKlVI7DwNyKsY9JTyudrxTqhOxi9jgcJNWiUaNe9yhL8Pyc2YBqUTTYhh_a2d1rvkZ
+0Gh1crviVxqBrIkRKaMRXZ4f1vDLz-3NvG_vwPOo8WRFo5nGmSdTw7CjBaigJ_cYCfDhoP11pEnw
+cndsZNcHs-v05LlxeIIMDD_f5Bvz-il_DLA4eK2HqgLdxh8ziSDl2azk14MJY4amzz6reEXUuKLV
+RsZGf_jbDGKhE2HuDQ5ovoLOi4OqE1oRuqh-dGxitrYouP2SN1l_1tCEMRth86FMV-6AQtZsvdUo
+y9MtQ7e35atjA8nHtgADlDTmJBKQiUHUsOZ77p1qp17HAFMovUkc739opfEYnKUn6Itpw5Ipm_Is
+ra6chJUfMpOFof5rb5OjqFAN27c_-mPo1lQU3ndYlKGh_n5V8ufX6v2Yri8WzOPf6hjVYotkmoMP
+NPAICDCB8W5ddBjsopzLVVEtaXDu9Qj6-zf77hT4iQ7rBd2Ner8iLqN3Kis0dvkNM3_uH8onau1G
+Y_YYw7PPSZyd2S_7Dd6G-IG4ayO6e5DD6oUwwekyiQI_3rTXNa_wldGxqW9u818010ekE4Qdlfcj
+beIn7fAeaOjReZ87hRgWyMs-EgTVHw8RL3yI_O6VvRTVRONRF1Y4C_-IYa8z-bfrwXx3BBd9TTgb
+EnS9wVOyC2OgUN6BhPLGLhxzkJ05nEjizXEc9t5EPYoSRwesajGGrrG_0-qWbuU5hKLPLkyeJLHb
+5HXOTVsrUR59Vov2M3_EswkxcImblox3k3VS2yihZMGyfqLzZIUXgd8ufkevKKU6DxwacGTb
\ No newline at end of file
diff --git a/src/main/config/runner-web.xml b/src/main/config/runner-web.xml
new file mode 100644 (file)
index 0000000..5e62081
--- /dev/null
@@ -0,0 +1,94 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!-- 
+ Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+-->
+<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
+         metadata-complete="false" version="3.0">
+
+    <context-param>
+        <param-name>contextConfigLocation</param-name>
+        <param-value>/WEB-INF/spring-servlet.xml,
+            classpath:applicationContext.xml
+        </param-value>
+    </context-param>
+
+    <context-param>
+        <param-name>spring.profiles.default</param-name>
+        <param-value>nooauth</param-value>
+    </context-param>
+
+    <listener>
+        <listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
+    </listener>
+
+    <servlet>
+        <servlet-name>ManagementServlet</servlet-name>
+        <servlet-class>ajsc.ManagementServlet</servlet-class>
+    </servlet>
+
+
+    <filter>
+        <filter-name>InterceptorFilter</filter-name>
+        <filter-class>ajsc.filters.InterceptorFilter</filter-class>
+        <init-param>
+            <param-name>preProcessor_interceptor_config_file</param-name>
+            <param-value>/etc/PreProcessorInterceptors.properties</param-value>
+        </init-param>
+        <init-param>
+            <param-name>postProcessor_interceptor_config_file</param-name>
+            <param-value>/etc/PostProcessorInterceptors.properties</param-value>
+        </init-param>
+
+    </filter>
+
+    <servlet>
+        <servlet-name>RestletServlet</servlet-name>
+        <servlet-class>ajsc.restlet.RestletSpringServlet</servlet-class>
+        <init-param>
+            <param-name>org.restlet.component</param-name>
+            <param-value>restletComponent</param-value>
+        </init-param>
+    </servlet>
+
+    <servlet>
+        <servlet-name>CamelServlet</servlet-name>
+        <servlet-class>ajsc.servlet.AjscCamelServlet</servlet-class>
+    </servlet>
+
+
+    <filter>
+        <filter-name>springSecurityFilterChain</filter-name>
+        <filter-class>org.springframework.web.filter.DelegatingFilterProxy</filter-class>
+    </filter>
+
+    <servlet>
+        <servlet-name>spring</servlet-name>
+        <servlet-class>org.springframework.web.servlet.DispatcherServlet</servlet-class>
+        <load-on-startup>1</load-on-startup>
+    </servlet>
+
+    <!--       <servlet-mapping>
+            <servlet-name>spring</servlet-name>
+            <url-pattern>/</url-pattern>
+        </servlet-mapping>-->
+
+    <!-- BEGIN jsp -->
+
+    <servlet id="jsp">
+        <servlet-name>jsp</servlet-name>
+        <servlet-class>org.apache.jasper.servlet.JspServlet</servlet-class>
+    </servlet>
+
+
+    <!-- BEGIN static content -->
+    <servlet>
+        <servlet-name>default</servlet-name>
+        <servlet-class>org.eclipse.jetty.servlet.DefaultServlet</servlet-class>
+        <init-param>
+            <param-name>dirAllowed</param-name>
+            <param-value>true</param-value>
+        </init-param>
+    </servlet>
+    <!-- END static content -->
+</web-app>
diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile
new file mode 100644 (file)
index 0000000..6182c35
--- /dev/null
@@ -0,0 +1,26 @@
+FROM ubuntu:14.04
+
+ARG MICRO_HOME=/opt/app/search-data-service
+ARG BIN_HOME=$MICRO_HOME/bin
+
+RUN apt-get update
+
+# Install and setup java8
+RUN apt-get update && apt-get install -y software-properties-common
+## sudo -E is required to preserve the environment. If you remove that line, it will most likely freeze at this step
+RUN sudo -E add-apt-repository ppa:openjdk-r/ppa && apt-get update && apt-get install -y openjdk-8-jdk
+## Setup JAVA_HOME, this is useful for docker commandline
+ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
+RUN export JAVA_HOME
+
+# Build up the deployment folder structure
+RUN mkdir -p $MICRO_HOME
+ADD swm/package/nix/dist_files/appl/search-data-service/* $MICRO_HOME/
+RUN mkdir -p $BIN_HOME
+COPY *.sh $BIN_HOME
+RUN chmod 755 $BIN_HOME/*
+RUN ln -s /logs $MICRO_HOME/logs
+
+EXPOSE 9509
+
+CMD /opt/app/search-data-service/bin/start.sh
diff --git a/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuth.java b/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuth.java
new file mode 100644 (file)
index 0000000..3039cf6
--- /dev/null
@@ -0,0 +1,63 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.auth;
+
+import javax.ws.rs.core.Cookie;
+import javax.ws.rs.core.HttpHeaders;
+
+public class SearchDbServiceAuth {
+
+  public SearchDbServiceAuth() {
+  }
+
+  public boolean authBasic(String username, String authFunction) {
+    return SearchDbServiceAuthCore.authorize(username, authFunction);
+  }
+
+  public String authUser(HttpHeaders headers, String authUser, String authFunction) {
+
+    // Delegate straight to the basic check; there is no need to construct a
+    // second SearchDbServiceAuth instance or to build the username with a
+    // StringBuilder.
+    if (!authBasic(authUser, authFunction)) {
+      return "AAI_9101";
+    }
+    return "OK";
+  }
+
+  public boolean authCookie(Cookie cookie, String authFunction, StringBuilder username) {
+
+    if (cookie == null) {
+      return false;
+    }
+
+    return SearchDbServiceAuthCore.authorize(username.toString(), authFunction);
+  }
+}
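
A minimal sketch of how this class might be driven from a JAX-RS resource, assuming an @Context-injected HttpHeaders; the header name "X-DocumentAPI-User" and the function string "GET:search" are illustrative assumptions, not part of this patch:

    import javax.ws.rs.core.HttpHeaders;
    import org.openecomp.sa.auth.SearchDbServiceAuth;

    public class AuthUsageSketch {

      public boolean isCallerAllowed(HttpHeaders headers) {
        SearchDbServiceAuth auth = new SearchDbServiceAuth();

        // "X-DocumentAPI-User" is an assumed header name, for illustration only.
        String user = headers.getRequestHeaders().getFirst("X-DocumentAPI-User");

        // authUser() returns "OK" on success and the error code "AAI_9101"
        // when the user/function pair is not authorized.
        return "OK".equals(auth.authUser(headers, user, "GET:search"));
      }
    }
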
diff --git a/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuthCore.java b/src/main/java/org/openecomp/sa/auth/SearchDbServiceAuthCore.java
new file mode 100644 (file)
index 0000000..b0d823a
--- /dev/null
@@ -0,0 +1,256 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.auth;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Timer;
+
+public class SearchDbServiceAuthCore {
+
+  private static Logger logger = LoggerFactory.getInstance()
+      .getLogger(SearchDbServiceAuthCore.class.getName());
+
+  private static String GlobalAuthFileName = SearchDbConstants.SDB_AUTH_CONFIG_FILENAME;
+
+  private enum HTTP_METHODS {
+    POST, GET, PUT, DELETE
+  }
+
+  // Utility class - don't instantiate.
+  private SearchDbServiceAuthCore() {
+  }
+
+  private static boolean usersInitialized = false;
+  private static HashMap<String, SearchDbAuthUser> users;
+  private static boolean timerSet = false;
+  private static Timer timer = null;
+
+  public synchronized static void init() {
+
+
+    SearchDbServiceAuthCore.getConfigFile();
+    SearchDbServiceAuthCore.reloadUsers();
+
+  }
+
+  public static void cleanup() {
+    // The reload timer is not guaranteed to have been created, so guard
+    // against a NullPointerException here.
+    if (timer != null) {
+      timer.cancel();
+    }
+  }
+
+  public static String getConfigFile() {
+    if (GlobalAuthFileName == null) {
+      String nc = SearchDbConstants.SDB_AUTH_CONFIG_FILENAME;
+      if (nc == null) {
+        nc = "/home/aaiadmin/etc/aaipolicy.json";
+      }
+
+      GlobalAuthFileName = nc;
+    }
+    return GlobalAuthFileName;
+  }
+
+  public synchronized static void reloadUsers() {
+    users = new HashMap<String, SearchDbAuthUser>();
+
+
+    ObjectMapper mapper = new ObjectMapper(); // can be reused and shared globally
+    JSONParser parser = new JSONParser();
+    try {
+      // Parse once with json-simple purely as a syntax check (it throws
+      // ParseException on malformed input), then read the tree with Jackson.
+      parser.parse(new FileReader(GlobalAuthFileName));
+      JsonNode rootNode = mapper.readTree(new File(GlobalAuthFileName));
+      JsonNode rolesNode = rootNode.path("roles");
+
+      for (JsonNode roleNode : rolesNode) {
+        String roleName = roleNode.path("name").asText();
+
+        TabularAuthRole authRole = new TabularAuthRole();
+        JsonNode usersNode = roleNode.path("users");
+        JsonNode functionsNode = roleNode.path("functions");
+        for (JsonNode functionNode : functionsNode) {
+          String function = functionNode.path("name").asText();
+          JsonNode methodsNode = functionNode.path("methods");
+          boolean hasMethods = false;
+          for (JsonNode methodNode : methodsNode) {
+            String methodName = methodNode.path("name").asText();
+            hasMethods = true;
+            String thisFunction = methodName + ":" + function;
+
+            authRole.addAllowedFunction(thisFunction);
+          }
+
+          if (!hasMethods) {
+            // iterate the list from HTTP_METHODS
+            for (HTTP_METHODS meth : HTTP_METHODS.values()) {
+              String thisFunction = meth.toString() + ":" + function;
+
+              authRole.addAllowedFunction(thisFunction);
+            }
+          }
+
+        }
+        for (JsonNode userNode : usersNode) {
+          // make the user lower case
+          String username = userNode.path("username").asText().toLowerCase();
+          SearchDbAuthUser authUser = users.containsKey(username)
+              ? users.get(username) : new SearchDbAuthUser();
+
+          authUser.setUser(username);
+          authUser.addRole(roleName, authRole);
+          users.put(username, authUser);
+        }
+      }
+    } catch (FileNotFoundException fnfe) {
+      logger.debug("Failed to load the policy file: " + fnfe.getMessage());
+
+    } catch (ParseException e) {
+      logger.debug("Failed to parse the policy file: " + e.getMessage());
+
+    } catch (JsonProcessingException e) {
+      logger.debug("JSON processing error while parsing policy file: " + e.getMessage());
+
+    } catch (IOException e) {
+      logger.debug("IO Exception while parsing policy file: " + e.getMessage());
+    }
+
+    usersInitialized = true;
+
+  }
+
+  public static class SearchDbAuthUser {
+    public SearchDbAuthUser() {
+      this.roles = new HashMap<String, TabularAuthRole>();
+    }
+
+    private String username;
+    private HashMap<String, TabularAuthRole> roles;
+
+    public String getUser() {
+      return this.username;
+    }
+
+    public HashMap<String, TabularAuthRole> getRoles() {
+      return this.roles;
+    }
+
+    public void addRole(String roleName, TabularAuthRole authRole) {
+      this.roles.put(roleName, authRole);
+    }
+
+    public boolean checkAllowed(String checkFunc) {
+      for (Map.Entry<String, TabularAuthRole> roleEntry : this.roles.entrySet()) {
+        TabularAuthRole role = roleEntry.getValue();
+        if (role.hasAllowedFunction(checkFunc)) {
+          // break out as soon as we find it
+          return true;
+        }
+      }
+      // we would have got positive confirmation had it been there
+      return false;
+    }
+
+    public void setUser(String myuser) {
+      this.username = myuser;
+    }
+
+  }
+
+  public static class TabularAuthRole {
+    public TabularAuthRole() {
+      this.allowedFunctions = new ArrayList<String>();
+    }
+
+    private List<String> allowedFunctions;
+
+    public void addAllowedFunction(String func) {
+      this.allowedFunctions.add(func);
+    }
+
+    public void delAllowedFunction(String delFunc) {
+      // List.remove() is already a no-op when the element is absent, so no
+      // contains() check is required.
+      this.allowedFunctions.remove(delFunc);
+    }
+
+    public boolean hasAllowedFunction(String afunc) {
+      return this.allowedFunctions.contains(afunc);
+    }
+  }
+
+  public static HashMap<String, SearchDbAuthUser> getUsers(String key) {
+    if (!usersInitialized || (users == null)) {
+      reloadUsers();
+    }
+    return users;
+  }
+
+  public static boolean authorize(String username, String authFunction) {
+
+    if (!usersInitialized || (users == null)) {
+      init();
+    }
+
+    return users.containsKey(username)
+        && users.get(username).checkAllowed(authFunction);
+  }
+}
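
The policy file parsed by reloadUsers() is keyed by roles, each carrying users and functions. A minimal sketch of the file layout implied by the parsing code and of an authorization check; the role, user, and function names are invented for illustration and are not part of this patch:

    import org.openecomp.sa.auth.SearchDbServiceAuthCore;

    public class AuthCoreUsageSketch {
      public static void main(String[] args) {
        // Assumed policy file shape, mirroring the structure reloadUsers() walks
        // (roles -> users/functions -> methods):
        //
        // {
        //   "roles": [
        //     {
        //       "name": "admin",
        //       "users": [ { "username": "someuser" } ],
        //       "functions": [
        //         { "name": "search", "methods": [ { "name": "GET" }, { "name": "PUT" } ] }
        //       ]
        //     }
        //   ]
        // }
        //
        // With that file at the path named by SDB_AUTH_CONFIG_FILENAME, the
        // check below succeeds. Functions with no "methods" array are expanded
        // to all of POST/GET/PUT/DELETE.
        boolean allowed = SearchDbServiceAuthCore.authorize("someuser", "GET:search");
        System.out.println("someuser may GET:search -> " + allowed);
      }
    }
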
diff --git a/src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java b/src/main/java/org/openecomp/sa/rest/AnalysisConfiguration.java
new file mode 100644 (file)
index 0000000..6218d9c
--- /dev/null
@@ -0,0 +1,243 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
+import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * This class encapsulates the configuration of the predefined
+ * Analyzer and Filter behaviours that help to tell the document
+ * store how to index the documents that are provided to it.
+ */
+public class AnalysisConfiguration {
+
+  /**
+   * Contains all of the predefined indexing filters.
+   */
+  private FilterSchema[] customFilters;
+
+  /**
+   * Contains all of the predefined indexing analyzers.
+   */
+  private AnalyzerSchema[] customAnalysers;
+
+  /**
+   * Indicates whether or not we have imported the filter and
+   * analyzer configurations.
+   */
+  private AtomicBoolean configured = new AtomicBoolean(false);
+
+  /**
+   * A json format string which is readable by Elastic Search and defines
+   * all of the custom filters and analyzers that we need Elastic Search
+   * to know about.
+   */
+  private static String esSettings = null;
+
+  private static Logger logger = LoggerFactory.getInstance()
+      .getLogger(AnalysisConfiguration.class.getName());
+
+
+  /**
+   * Imports the filter and analyzer configuration files and
+   * builds an Elastic Search readable settings file from the
+   * contents.
+   *
+   * @param filterConfigFile   - Location of filter configuration json file
+   * @param analyzerConfigFile - Location of analyzer configuration json file
+   */
+  public void init(String filterConfigFile, String analyzerConfigFile) {
+
+    if (configured.compareAndSet(false, true)) {
+      ObjectMapper mapper = new ObjectMapper();
+
+      File filtersConfig = new File(filterConfigFile);
+      try {
+        customFilters = mapper.readValue(filtersConfig, FilterSchema[].class);
+      } catch (IOException e) {
+
+        // generate log
+        logger.warn(SearchDbMsgs.FILTERS_CONFIG_FAILURE, filterConfigFile, e.getMessage());
+      }
+
+      File analysersConfig = new File(analyzerConfigFile);
+      try {
+        customAnalysers = mapper.readValue(analysersConfig, AnalyzerSchema[].class);
+      } catch (IOException e) {
+
+        // generate log
+        logger.warn(SearchDbMsgs.ANALYSYS_CONFIG_FAILURE, analyzerConfigFile, e.getMessage());
+      }
+
+      esSettings = buildEsIndexSettings();
+    }
+  }
+
+
+  /**
+   * Returns the set of pre-configured filters.
+   *
+   * @return - An array of filters.
+   */
+  public FilterSchema[] getFilters() {
+    return customFilters;
+  }
+
+
+  /**
+   * Returns the set of pre-configured analyzers.
+   *
+   * @return - An array of analyzers.
+   */
+  public AnalyzerSchema[] getAnalyzers() {
+    init(SearchDbConstants.SDB_FILTER_CONFIG_FILE, SearchDbConstants.SDB_ANALYSIS_CONFIG_FILE);
+    return customAnalysers;
+  }
+
+
+  /**
+   * Imports the filter and analyzer configurations and translates those
+   * into a settings string that will be parseable by Elastic Search.
+   *
+   * @return - Elastic Search formatted settings string.
+   */
+  public String getEsIndexSettings() {
+
+    // Generate the es-settings string from our filter and analyzer
+    // configurations if we have not already done so.
+    init(SearchDbConstants.SDB_FILTER_CONFIG_FILE, SearchDbConstants.SDB_ANALYSIS_CONFIG_FILE);
+
+    // Now, return the es-settings string.
+    return esSettings;
+  }
+
+
+  /**
+   * Constructs a settings string that is readable by Elastic Search based
+   * on the contents of the filter and analyzer configuration files.
+   *
+   * @return Elastic Search formatted settings string.
+   */
+  public String buildEsIndexSettings() {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+    sb.append("\"analysis\": {");
+
+    // Define the custom filters.
+    boolean atLeastOneFilter = false;
+    sb.append("\"filter\": {");
+    AtomicBoolean firstFilter = new AtomicBoolean(true);
+    for (FilterSchema filter : customFilters) {
+
+      // Append a comma before the next entry, unless it is the
+      // first one.
+      if (!firstFilter.compareAndSet(true, false)) {
+        sb.append(", ");
+      }
+
+      // Now, build the filter entry.
+      buildFilterEntry(filter, sb);
+      atLeastOneFilter = true;
+    }
+    sb.append((atLeastOneFilter) ? "}," : "}");
+
+    // Define the custom analyzers.
+    sb.append("\"analyzer\": {");
+    AtomicBoolean firstAnalyzer = new AtomicBoolean(true);
+    for (AnalyzerSchema analyzer : customAnalysers) {
+
+      // Prepend a comma before the entry, unless it is the
+      // first one.
+      if (!firstAnalyzer.compareAndSet(true, false)) {
+        sb.append(",");
+      }
+
+      // Now, construct the entry for this analyzer.
+      buildAnalyzerEntry(analyzer, sb);
+    }
+    sb.append("}");
+
+    sb.append("}");
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+
+  /**
+   * Constructs an ElasticSearch friendly custom filter definition.
+   *
+   * @param filter - The filter to generate ElasticSearch json for.
+   * @param sb     - The string builder to append the filter definition
+   *               to.
+   */
+  private void buildFilterEntry(FilterSchema filter, StringBuilder sb) {
+
+    sb.append("\"" + filter.getName()).append("\": {");
+
+    sb.append(filter.getConfiguration());
+
+    sb.append("}");
+  }
+
+
+  /**
+   * Constructs an ElasticSearch friendly custom analyzer definition.
+   *
+   * @param analyzer - The analyzer to generate ElasticSearch json for.
+   * @param sb       - The string builder to append the analyzer definition
+   *                 to.
+   */
+  private void buildAnalyzerEntry(AnalyzerSchema analyzer, StringBuilder sb) {
+
+    sb.append("\"").append(analyzer.getName()).append("\": {");
+    sb.append("\"type\": \"custom\",");
+    sb.append("\"tokenizer\": ").append("\"").append(analyzer.getTokenizer()).append("\",");
+    sb.append("\"filter\": [");
+    boolean firstFilter = true;
+    for (String filter : analyzer.getFilters()) {
+      if (!firstFilter) {
+        sb.append(",");
+      } else {
+        firstFilter = false;
+      }
+      sb.append("\"").append(filter).append("\"");
+    }
+    sb.append("]");
+    sb.append("}");
+  }
+}
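
A short sketch of how the settings string produced by this class is expected to be obtained; the config file paths are illustrative assumptions (the real ones come from SearchDbConstants), and this is not part of the patch:

    import org.openecomp.sa.rest.AnalysisConfiguration;

    public class AnalysisSettingsSketch {
      public static void main(String[] args) {
        AnalysisConfiguration config = new AnalysisConfiguration();

        // Import the json config files and build the Elastic Search settings
        // string; init() only does the work once thanks to the atomic guard.
        config.init("/etc/filter-config.json", "/etc/analysis-config.json");

        // Returns a string of the form:
        //   {"analysis": {"filter": { ... },"analyzer": { ... }}}
        System.out.println(config.getEsIndexSettings());
      }
    }
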
diff --git a/src/main/java/org/openecomp/sa/rest/AnalyzerApi.java b/src/main/java/org/openecomp/sa/rest/AnalyzerApi.java
new file mode 100644 (file)
index 0000000..430b7a6
--- /dev/null
@@ -0,0 +1,160 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.openecomp.cl.api.LogFields;
+import org.openecomp.cl.api.LogLine;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.ElasticSearchHttpController;
+import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+
+@Path("/analyzers")
+public class AnalyzerApi {
+
+  private SearchServiceApi searchService = null;
+
+  // Set up the loggers.
+  private static Logger logger = LoggerFactory.getInstance()
+      .getLogger(AnalyzerApi.class.getName());
+  private static Logger auditLogger = LoggerFactory.getInstance()
+      .getAuditLogger(AnalyzerApi.class.getName());
+
+  public AnalyzerApi(SearchServiceApi searchService) {
+    this.searchService = searchService;
+  }
+
+  @GET
+  public Response processGet(@Context HttpServletRequest request,
+                             @Context HttpHeaders headers,
+                             ApiUtils apiUtils) {
+
+    Response.Status responseCode = Response.Status.INTERNAL_SERVER_ERROR;
+    String responseString = "Undefined error";
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    // Validate that the request is correctly authenticated before going
+    // any further.
+    try {
+
+      if (!searchService.validateRequest(headers, request,
+          ApiUtils.Action.GET, ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
+        logger.warn(SearchDbMsgs.GET_ANALYZERS_FAILURE, "Authentication failure.");
+        return Response.status(Response.Status.FORBIDDEN).entity("Authentication failure.").build();
+      }
+
+    } catch (Exception e) {
+
+      logger.warn(SearchDbMsgs.GET_ANALYZERS_FAILURE,
+          "Unexpected authentication failure - cause: " + e.getMessage());
+      return Response.status(Response.Status.FORBIDDEN).entity("Authentication failure.").build();
+    }
+
+
+    // Now, build the list of analyzers.
+    try {
+      responseString = buildAnalyzerList(ElasticSearchHttpController.getInstance()
+          .getAnalysisConfig());
+      responseCode = Response.Status.OK;
+
+    } catch (Exception e) {
+
+      logger.warn(SearchDbMsgs.GET_ANALYZERS_FAILURE,
+          "Unexpected failure retrieving analysis configuration - cause: " + e.getMessage());
+      responseString = "Failed to retrieve analysis configuration.  Cause: " + e.getMessage();
+    }
+
+    // Build the HTTP response.
+    Response response = Response.status(responseCode).entity(responseString).build();
+
+    // Generate our audit log.
+    auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, responseCode.getStatusCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, responseCode.getStatusCode()),
+        (request != null) ? request.getMethod() : "Unknown",
+        (request != null) ? request.getRequestURL().toString() : "Unknown",
+        (request != null) ? request.getRemoteHost() : "Unknown",
+        Integer.toString(response.getStatus()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+    ApiUtils.clearMdcContext();
+
+    return response;
+  }
+
+
+  /**
+   * This method takes a list of analyzer objects and generates a simple json
+   * structure to enumerate them.
+   *
+   * <p>Note, this includes only the aspects of the analyzer object that we want
+   * to make public to an external client.
+   *
+   * @param analysisConfig - The analysis configuration object to extract the
+   *                       analyzers from.
+   * @return - A json string enumerating the defined analyzers.
+   */
+  private String buildAnalyzerList(AnalysisConfiguration analysisConfig) {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+    AtomicBoolean firstAnalyzer = new AtomicBoolean(true);
+    for (AnalyzerSchema analyzer : analysisConfig.getAnalyzers()) {
+
+      if (!firstAnalyzer.compareAndSet(true, false)) {
+        sb.append(", ");
+      }
+
+      sb.append("{");
+      sb.append("\"name\": \"").append(analyzer.getName()).append("\", ");
+      sb.append("\"description\": \"").append(analyzer.getDescription()).append("\", ");
+      sb.append("\"behaviours\": [");
+      AtomicBoolean firstBehaviour = new AtomicBoolean(true);
+      for (String behaviour : analyzer.getBehaviours()) {
+        if (!firstBehaviour.compareAndSet(true, false)) {
+          sb.append(", ");
+        }
+        sb.append("\"").append(behaviour).append("\"");
+      }
+      sb.append("]");
+      sb.append("}");
+    }
+    sb.append("}");
+
+    return sb.toString();
+  }
+}
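
Assuming the AJSC route configuration exposes this resource under the service URL prefix, a client-side sketch of fetching the analyzer list; the scheme, host, port, and the JAX-RS 2.0 client dependency are assumptions, not part of this patch:

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;

    public class AnalyzerClientSketch {
      public static void main(String[] args) {
        Client client = ClientBuilder.newClient();

        // Host, port, and path are illustrative; the real prefix is wired up
        // by the AJSC routes, and the port comes from the service config.
        String analyzers = client
            .target("http://localhost:9509/services/search-data-service/v1/search/analyzers")
            .request()
            .get(String.class);

        System.out.println(analyzers);
      }
    }
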
diff --git a/src/main/java/org/openecomp/sa/rest/ApiUtils.java b/src/main/java/org/openecomp/sa/rest/ApiUtils.java
new file mode 100644 (file)
index 0000000..4c043d4
--- /dev/null
@@ -0,0 +1,180 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.openecomp.cl.mdc.MdcContext;
+import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
+import org.slf4j.MDC;
+
+import java.util.UUID;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+
+
+public class ApiUtils {
+
+  public static final String SEARCH_AUTH_POLICY_NAME = "search";
+  public static final String URL_PREFIX = "services/search-data-service/v1/search";
+
+  public enum Action {
+    POST, GET, PUT, DELETE
+  }
+
+
+  /**
+   * This method uses the contents of the supplied HTTP headers and request
+   * structures to populate the MDC Context used for logging purposes.
+   *
+   * @param httpReq - HTTP request structure.
+   * @param headers - HTTP headers
+   */
+  protected static void initMdcContext(HttpServletRequest httpReq, HttpHeaders headers) {
+
+    // Auto generate a transaction if we were not provided one.
+    String transId = null;
+    if (headers != null) {
+      transId = headers.getRequestHeaders().getFirst("X-TransactionId");
+
+      if ((transId == null) || (transId.equals(""))) {
+        transId = UUID.randomUUID().toString();
+      }
+    }
+
+    String fromIp = (httpReq != null) ? httpReq.getRemoteAddr() : "";
+    String fromApp = (headers != null) ? headers.getRequestHeaders().getFirst("X-FromAppId") : "";
+
+    MdcContext.initialize(transId, SearchDbConstants.SDB_SERVICE_NAME, "", fromApp, fromIp);
+  }
+
+
+  protected static void clearMdcContext() {
+    MDC.clear();
+  }
+
+  public static String buildIndexUri(String index) {
+
+    return URL_PREFIX + "/indexes/" + index;
+  }
+
+  public static String buildDocumentUri(String index, String documentId) {
+
+    return buildIndexUri(index) + "/documents/" + documentId;
+  }
+
+  public static boolean validateIndexUri(String uri) {
+
+    // If the URI starts with a leading '/' character, remove it.
+    uri = uri.startsWith("/") ? uri.substring(1) : uri;
+
+    // Now, tokenize the URI string.
+    String[] tokens = uri.split("/");
+
+    return (tokens.length == 6) && (tokens[4].equals("indexes"));
+
+  }
+
+  public static boolean validateDocumentUri(String uri, boolean requireId) {
+
+    // If the URI starts with a leading '/' character, remove it.
+    uri = uri.startsWith("/") ? uri.substring(1) : uri;
+
+    // Now, tokenize the URI string.
+    String[] tokens = uri.split("/");
+
+    if (requireId) {
+      return (tokens.length == 8) && (tokens[4].equals("indexes")
+          && (tokens[6].equals("documents")));
+    } else {
+      return ((tokens.length == 8) || (tokens.length == 7))
+          && (tokens[4].equals("indexes") && (tokens[6].equals("documents")));
+    }
+  }
+
+  public static String extractIndexFromUri(String uri) {
+
+    // If the URI starts with a leading '/' character, remove it.
+    uri = uri.startsWith("/") ? uri.substring(1) : uri;
+
+    // Now, tokenize the URI string.
+    String[] tokens = uri.split("/");
+
+    int i = 0;
+    for (String token : tokens) {
+      if (token.equals("indexes")) {
+        if (i + 1 < tokens.length) {
+          return tokens[i + 1];
+        }
+      }
+      i++;
+    }
+
+    return null;
+  }
+
+  public static String extractIdFromUri(String uri) {
+
+    // If the URI starts with a leading '/' character, remove it.
+    uri = uri.startsWith("/") ? uri.substring(1) : uri;
+
+    // Now, tokenize the URI string.
+    String[] tokens = uri.split("/");
+
+    int i = 0;
+    for (String token : tokens) {
+      if (token.equals("documents")) {
+        if (i + 1 < tokens.length) {
+          return tokens[i + 1];
+        }
+      }
+      i++;
+    }
+
+    return null;
+  }
+
+  public static String getHttpStatusString(int httpStatusCode) {
+    // Some of the status codes we use are still in draft state in the standards, and are not
+    // recognized in the javax library.  We need to manually translate these to human-readable
+    // strings.
+    String statusString = "Unknown";
+    Response.Status status = Response.Status.fromStatusCode(httpStatusCode);
+
+    if (status == null) {
+      switch (httpStatusCode) {
+        case 207:
+          statusString = "Multi Status";
+          break;
+        default:
+      }
+    } else {
+      statusString = status.toString();
+    }
+
+    return statusString;
+  }
+}
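
The URI helpers above assume the fixed token layout under URL_PREFIX; a quick round-trip sketch (the index name and document id are invented, and this is not part of the patch):

    import org.openecomp.sa.rest.ApiUtils;

    public class UriHelperSketch {
      public static void main(String[] args) {
        String docUri = ApiUtils.buildDocumentUri("widgets", "doc-42");
        // -> services/search-data-service/v1/search/indexes/widgets/documents/doc-42

        System.out.println(ApiUtils.validateDocumentUri(docUri, true)); // true
        System.out.println(ApiUtils.extractIndexFromUri(docUri));       // widgets
        System.out.println(ApiUtils.extractIdFromUri(docUri));          // doc-42
      }
    }
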
diff --git a/src/main/java/org/openecomp/sa/rest/BulkApi.java b/src/main/java/org/openecomp/sa/rest/BulkApi.java
new file mode 100644 (file)
index 0000000..f74a3c2
--- /dev/null
@@ -0,0 +1,257 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.github.fge.jsonschema.main.JsonSchema;
+import com.github.fge.jsonschema.main.JsonSchemaFactory;
+import org.openecomp.cl.api.LogFields;
+import org.openecomp.cl.api.LogLine;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
+import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
+import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.concurrent.atomic.AtomicBoolean;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+
+
+/**
+ * This class encapsulates the REST end points associated with performing
+ * bulk operations against the document store.
+ */
+@Path("/bulk")
+public class BulkApi {
+
+  /**
+   * Indicates whether or not we have performed the one-time static
+   * initialization required for performing schema validation.
+   */
+  protected static AtomicBoolean validationInitialized = new AtomicBoolean(false);
+
+  /**
+   * Factory used for importing our payload schema for validation purposes.
+   */
+  protected static JsonSchemaFactory schemaFactory = null;
+
+  /**
+   * Imported payload schema that will be used by our validation methods.
+   */
+  protected static JsonSchema schema = null;
+
+  protected SearchServiceApi searchService = null;
+
+  // Instantiate the loggers.
+  private static Logger logger = LoggerFactory.getInstance().getLogger(BulkApi.class.getName());
+  private static Logger auditLogger = LoggerFactory.getInstance()
+      .getAuditLogger(BulkApi.class.getName());
+
+
+  /**
+   * Create a new instance of the BulkApi end point.
+   */
+  public BulkApi(SearchServiceApi searchService) {
+    this.searchService = searchService;
+  }
+
+
+  /**
+   * Processes client requests containing a set of operations to be
+   * performed in bulk.
+   *
+   * <p>Method: POST
+   *
+   * @param operations - JSON structure enumerating the operations to be
+   *                   performed.
+   * @param request    - Raw HTTP request.
+   * @param headers    - HTTP headers.
+   * @return - A standard REST response structure.
+   */
+  public Response processPost(String operations,
+                              HttpServletRequest request,
+                              HttpHeaders headers,
+                              DocumentStoreInterface documentStore,
+                              ApiUtils apiUtils) {
+
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    // Set a default result code and entity string for the request.
+    int resultCode = 500;
+    String resultString = "Unexpected error";
+
+    if (logger.isDebugEnabled()) {
+      logger.debug("SEARCH: Process Bulk Request - operations = ["
+          + operations.replaceAll("\n", "") + " ]");
+    }
+
+    try {
+
+      // Validate that the request is correctly authenticated before going
+      // any further.
+      if (!searchService.validateRequest(headers, request,
+          ApiUtils.Action.POST, ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
+        logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, "Authentication failure.");
+
+        return buildResponse(Response.Status.FORBIDDEN.getStatusCode(),
+            "Authentication failure.", request, apiUtils);
+      }
+
+    } catch (Exception e) {
+
+      // This is a catch all for any unexpected failure trying to perform
+      // the authentication.
+      logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE,
+          "Unexpected authentication failure - cause: " + e.getMessage());
+      if (logger.isDebugEnabled()) {
+        logger.debug("Stack Trace:\n" + e.getStackTrace());
+      }
+
+      return buildResponse(Response.Status.FORBIDDEN.getStatusCode(),
+          "Authentication failure - cause " + e.getMessage(),
+          request,
+          apiUtils);
+    }
+
+    // We expect a payload containing a JSON structure enumerating the
+    // operations to be performed.
+    if (operations == null) {
+      logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, "Missing operations list payload");
+
+      return buildResponse(resultCode, "Missing payload", request, apiUtils);
+    }
+
+
+    // Marshal the supplied json string into a Java object.
+    ObjectMapper mapper = new ObjectMapper();
+    BulkRequest[] requests = null;
+    try {
+      requests = mapper.readValue(operations, BulkRequest[].class);
+
+    } catch (IOException e) {
+
+      logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE,
+          "Failed to marshal operations list: " + e.getMessage());
+      if (logger.isDebugEnabled()) {
+        logger.debug("Stack Trace:\n" + e.getStackTrace());
+      }
+
+      // Populate the result code and entity string for our HTTP response
+      // and return the response to the client.
+      return buildResponse(javax.ws.rs.core.Response.Status.BAD_REQUEST.getStatusCode(),
+          "Unable to marshal operations: " + e.getMessage(),
+          request,
+          apiUtils);
+    }
+
+    // Verify that our parsed operations list actually contains some valid
+    // operations.
+    if (requests.length == 0) {
+      logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, "Empty operations list in bulk request");
+
+
+      // Populate the result code and entity string for our HTTP response
+      // and return the response to the client.
+      return buildResponse(javax.ws.rs.core.Response.Status.BAD_REQUEST.getStatusCode(),
+          "Empty operations list in bulk request",
+          request,
+          apiUtils);
+    }
+    try {
+
+      // Now, forward the set of bulk operations to the DAO for processing.
+      OperationResult result = documentStore.performBulkOperations(requests);
+
+      // Populate the result code and entity string for our HTTP response.
+      resultCode = result.getResultCode();
+      resultString = (result.getFailureCause() == null)
+          ? result.getResult() : result.getFailureCause();
+
+    } catch (DocumentStoreOperationException e) {
+
+      logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE,
+          "Unexpected failure communicating with document store: " + e.getMessage());
+      if (logger.isDebugEnabled()) {
+        logger.debug("Stack Trace:\n" + e.getStackTrace());
+      }
+
+      // Populate the result code and entity string for our HTTP response.
+      resultCode = javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR.getStatusCode();
+      resultString = "Unexpected failure processing bulk operations: " + e.getMessage();
+    }
+
+    // Build our HTTP response.
+    Response response = Response.status(resultCode).entity(resultString).build();
+
+    // Log the result.
+    if ((response.getStatus() >= 200) && (response.getStatus() < 300)) {
+      logger.info(SearchDbMsgs.PROCESSED_BULK_OPERATIONS);
+    } else {
+      logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, (String) response.getEntity());
+    }
+
+    // Finally, return the HTTP response to the client.
+    return buildResponse(resultCode, resultString, request, apiUtils);
+  }
+
+
+  /**
+   * This method generates an audit log and returns an HTTP response object.
+   *
+   * @param resultCode   - The result code to report.
+   * @param resultString - The result string to report.
+   * @param request       - The HTTP request to extract data from for the audit log.
+   * @return - An HTTP response object.
+   */
+  private Response buildResponse(int resultCode, String resultString,
+                                 HttpServletRequest request, ApiUtils apiUtils) {
+
+    Response response = Response.status(resultCode).entity(resultString).build();
+
+    // Generate our audit log.
+    auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, resultCode)
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION,
+                ApiUtils.getHttpStatusString(resultCode)),
+        (request != null) ? request.getMethod() : "Unknown",
+        (request != null) ? request.getRequestURL().toString() : "Unknown",
+        (request != null) ? request.getRemoteHost() : "Unknown",
+        Integer.toString(response.getStatus()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+    ApiUtils.clearMdcContext();
+
+    return response;
+  }
+}
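
The payload that processPost() expects is an array of single-operation wrappers. A sketch of the JSON shape and the same unmarshalling step the endpoint performs; the index, document content, and etag values are invented, and this is not part of the patch:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.openecomp.sa.rest.BulkRequest;

    public class BulkPayloadSketch {
      public static void main(String[] args) throws Exception {
        // Invented payload; each array element carries exactly one of
        // create/update/delete, with the target encoded in metaData.url.
        String operationsJson = "["
            + "{\"create\": {\"metaData\": {\"url\": \"/services/search-data-service/v1/search/indexes/widgets/documents/\"},"
            + "\"document\": {\"name\": \"first widget\"}}},"
            + "{\"delete\": {\"metaData\": {\"url\": \"/services/search-data-service/v1/search/indexes/widgets/documents/doc-42\","
            + "\"etag\": \"3\"}}}"
            + "]";

        ObjectMapper mapper = new ObjectMapper();
        BulkRequest[] requests = mapper.readValue(operationsJson, BulkRequest[].class);
        System.out.println(requests.length + " operations parsed");
      }
    }
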
diff --git a/src/main/java/org/openecomp/sa/rest/BulkMetaData.java b/src/main/java/org/openecomp/sa/rest/BulkMetaData.java
new file mode 100644 (file)
index 0000000..8a1a6d6
--- /dev/null
@@ -0,0 +1,53 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+public class BulkMetaData {
+
+  private String url;
+  private String etag;
+
+
+  public String getUrl() {
+    return url;
+  }
+
+  public void setUrl(String url) {
+    this.url = url;
+  }
+
+  public String getEtag() {
+    return etag;
+  }
+
+  public void setEtag(String anEtag) {
+    this.etag = anEtag;
+  }
+
+  @Override
+  public String toString() {
+    return "MetaData: [url=" + url + ", etag=" + etag + "]";
+  }
+}
diff --git a/src/main/java/org/openecomp/sa/rest/BulkOperation.java b/src/main/java/org/openecomp/sa/rest/BulkOperation.java
new file mode 100644 (file)
index 0000000..cf71ed5
--- /dev/null
@@ -0,0 +1,65 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+/**
+ * This class represents a single instance of an operation to be performed
+ * as part of a bulk request.
+ */
+public class BulkOperation {
+
+  /**
+   * Contains meta data to be passed to the document store for it
+   * to perform the operation.
+   */
+  private BulkMetaData metaData;
+
+  /**
+   * Contains the contents of the document to be acted on.
+   */
+  private Document document;
+
+
+  public void setMetaData(BulkMetaData metaData) {
+    this.metaData = metaData;
+  }
+
+  public BulkMetaData getMetaData() {
+    return metaData;
+  }
+
+  public Document getDocument() {
+    return document;
+  }
+
+  public void setDocument(Document document) {
+    this.document = document;
+  }
+
+  @Override
+  public String toString() {
+    return "Operation: [" + metaData.toString() + ", " + document + "]";
+  }
+}
diff --git a/src/main/java/org/openecomp/sa/rest/BulkRequest.java b/src/main/java/org/openecomp/sa/rest/BulkRequest.java
new file mode 100644 (file)
index 0000000..261f238
--- /dev/null
@@ -0,0 +1,115 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+
+/**
+ * This class represents a single instance of a request from the search client
+ * that would be part of a bundle of such operations sent in a single bulk
+ * request.
+ */
+public class BulkRequest {
+
+  public enum OperationType {
+    CREATE,
+    UPDATE,
+    DELETE
+  }
+
+  private BulkOperation create;
+  private BulkOperation update;
+  private BulkOperation delete;
+
+  public BulkOperation getCreate() {
+    return create;
+  }
+
+  public void setCreate(BulkOperation create) {
+    this.create = create;
+  }
+
+  public BulkOperation getUpdate() {
+    return update;
+  }
+
+  public void setUpdate(BulkOperation update) {
+    this.update = update;
+  }
+
+  public BulkOperation getDelete() {
+    return delete;
+  }
+
+  public void setDelete(BulkOperation delete) {
+    this.delete = delete;
+  }
+
+  public OperationType getOperationType() {
+
+    if (create != null) {
+      return OperationType.CREATE;
+    } else if (update != null) {
+      return OperationType.UPDATE;
+    } else if (delete != null) {
+      return OperationType.DELETE;
+    } else {
+      return null;
+    }
+  }
+
+  public BulkOperation getOperation() {
+    if (create != null) {
+      return create;
+    } else if (update != null) {
+      return update;
+    } else if (delete != null) {
+      return delete;
+    } else {
+      return null;
+    }
+  }
+
+  public String getIndex() {
+    return ApiUtils.extractIndexFromUri(getOperation().getMetaData().getUrl());
+  }
+
+  public String getId() {
+    return ApiUtils.extractIdFromUri(getOperation().getMetaData().getUrl());
+  }
+
+  @Override
+  public String toString() {
+
+    if (create != null) {
+      return "create: [" + create.toString() + "]\n";
+    } else if (update != null) {
+      return "update: [" + update.toString() + "]\n";
+    } else if (delete != null) {
+      return "delete: [" + delete.toString() + "]\n";
+    } else {
+      return "UNDEFINED";
+    }
+  }
+}
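
A small sketch of how a consumer might dispatch on a parsed request; the println bodies stand in for real handling, and this is not part of the patch:

    import org.openecomp.sa.rest.BulkRequest;

    public class BulkDispatchSketch {
      public static void dispatch(BulkRequest[] requests) {
        for (BulkRequest req : requests) {
          if (req.getOperationType() == null) {
            continue; // none of create/update/delete was supplied
          }
          // getIndex() and getId() are derived from the operation's metaData url.
          switch (req.getOperationType()) {
            case CREATE:
              System.out.println("create in index " + req.getIndex());
              break;
            case UPDATE:
              System.out.println("update " + req.getId() + " in " + req.getIndex());
              break;
            case DELETE:
              System.out.println("delete " + req.getId() + " from " + req.getIndex());
              break;
          }
        }
      }
    }
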
diff --git a/src/main/java/org/openecomp/sa/rest/Document.java b/src/main/java/org/openecomp/sa/rest/Document.java
new file mode 100644 (file)
index 0000000..a0be736
--- /dev/null
@@ -0,0 +1,65 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class Document {
+  private Map<String, Object> fields = new HashMap<String, Object>();
+
+  @JsonAnyGetter
+  public Map<String, Object> getFields() {
+    return fields;
+  }
+
+  @JsonAnySetter
+  public void setField(String name, Object value) {
+    fields.put(name, value);
+  }
+
+  public String toJson() throws JsonProcessingException {
+
+    ObjectMapper mapper = new ObjectMapper();
+    return mapper.writeValueAsString(this);
+  }
+
+
+  @Override
+  public String toString() {
+    // Build with a StringBuilder and separate the entries so that adjacent
+    // fields do not run together in the output.
+    StringBuilder str = new StringBuilder("Document: [");
+    boolean first = true;
+    for (Map.Entry<String, Object> entry : fields.entrySet()) {
+      if (!first) {
+        str.append(", ");
+      }
+      first = false;
+      str.append(entry.getKey()).append(": ").append(entry.getValue());
+    }
+    str.append("]");
+
+    return str.toString();
+  }
+}
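
Document relies on Jackson's any-getter/any-setter pair so that arbitrary top-level fields survive a round trip; a short sketch with invented field names, not part of the patch:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.openecomp.sa.rest.Document;

    public class DocumentRoundTripSketch {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // @JsonAnySetter routes every top-level property into the fields map.
        Document doc = mapper.readValue("{\"name\": \"widget\", \"size\": 42}", Document.class);
        System.out.println(doc.getFields().get("name")); // widget

        // @JsonAnyGetter flattens the map back out on serialization.
        System.out.println(doc.toJson()); // {"name":"widget","size":42}
      }
    }
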
diff --git a/src/main/java/org/openecomp/sa/rest/DocumentApi.java b/src/main/java/org/openecomp/sa/rest/DocumentApi.java
new file mode 100644 (file)
index 0000000..e3c15a5
--- /dev/null
@@ -0,0 +1,505 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.openecomp.cl.api.LogFields;
+import org.openecomp.cl.api.LogLine;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreDataEntityImpl;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
+import org.openecomp.sa.searchdbabstraction.entity.AggregationResults;
+import org.openecomp.sa.searchdbabstraction.entity.DocumentOperationResult;
+import org.openecomp.sa.searchdbabstraction.entity.SearchOperationResult;
+import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
+import org.openecomp.sa.searchdbabstraction.searchapi.SearchStatement;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+
+public class DocumentApi {
+  private static final String REQUEST_HEADER_RESOURCE_VERSION = "If-Match";
+  private static final String RESPONSE_HEADER_RESOURCE_VERSION = "ETag";
+
+  protected SearchServiceApi searchService = null;
+
+  private Logger logger = LoggerFactory.getInstance().getLogger(DocumentApi.class.getName());
+  private Logger auditLogger = LoggerFactory.getInstance()
+      .getAuditLogger(DocumentApi.class.getName());
+
+  public DocumentApi(SearchServiceApi searchService) {
+    this.searchService = searchService;
+  }
+
+  public Response processPost(String content, HttpServletRequest request, HttpHeaders headers,
+                              HttpServletResponse httpResponse, String index,
+                              DocumentStoreInterface documentStore) {
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    try {
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.setSerializationInclusion(Include.NON_EMPTY);
+      if (content == null) {
+        return handleError(request, content, Status.BAD_REQUEST);
+      }
+
+      boolean isValid;
+      try {
+        isValid = searchService.validateRequest(headers, request, ApiUtils.Action.POST,
+            ApiUtils.SEARCH_AUTH_POLICY_NAME);
+      } catch (Exception e) {
+        logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
+            "DocumentApi.processPost",
+            e.getMessage());
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      if (!isValid) {
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl();
+      document.setContent(content);
+
+      DocumentOperationResult result = documentStore.createDocument(index, document);
+      String output = null;
+      if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
+        output = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getDocument());
+      } else {
+        output = result.getError() != null
+            ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
+            : result.getFailureCause();
+      }
+
+      if (httpResponse != null) {
+        httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion());
+      }
+      Response response = Response.status(result.getResultCode()).entity(output).build();
+      logResult(request, Response.Status.fromStatusCode(response.getStatus()));
+
+      // Clear the MDC context so that no other transaction inadvertently
+      // uses our transaction id.
+      ApiUtils.clearMdcContext();
+
+      return response;
+    } catch (Exception e) {
+      return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  public Response processPut(String content, HttpServletRequest request, HttpHeaders headers,
+                             HttpServletResponse httpResponse, String index,
+                             String id, DocumentStoreInterface documentStore) {
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    try {
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.setSerializationInclusion(Include.NON_EMPTY);
+      if (content == null) {
+        return handleError(request, content, Status.BAD_REQUEST);
+      }
+
+      boolean isValid;
+      try {
+        isValid = searchService.validateRequest(headers, request, ApiUtils.Action.PUT,
+            ApiUtils.SEARCH_AUTH_POLICY_NAME);
+      } catch (Exception e) {
+        logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
+            "DocumentApi.processPut",
+            e.getMessage());
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      if (!isValid) {
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      String resourceVersion = headers.getRequestHeaders()
+          .getFirst(REQUEST_HEADER_RESOURCE_VERSION);
+
+      DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl();
+      document.setId(id);
+      document.setContent(content);
+      document.setVersion(resourceVersion);
+
+      DocumentOperationResult result = null;
+      if (resourceVersion == null) {
+        result = documentStore.createDocument(index, document);
+      } else {
+        result = documentStore.updateDocument(index, document);
+      }
+
+      String output = null;
+      if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
+        output = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getDocument());
+      } else {
+        output = result.getError() != null
+            ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
+            : result.getFailureCause();
+      }
+      if (httpResponse != null) {
+        httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion());
+      }
+      Response response = Response.status(result.getResultCode()).entity(output).build();
+      logResult(request, Response.Status.fromStatusCode(response.getStatus()));
+
+      // Clear the MDC context so that no other transaction inadvertently
+      // uses our transaction id.
+      ApiUtils.clearMdcContext();
+
+      return response;
+    } catch (Exception e) {
+      return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
+    }
+  }
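
For illustration, a minimal client-side sketch of the create-versus-update contract that processPut implements above. The base URL and header names are assumptions (the "If-Match" name is inferred from the error text in processDelete), and the two-way TLS that validateRequest enforces is omitted:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class DocumentPutSketch {

  // Hypothetical base URL; a real deployment also requires a client
  // certificate, which this sketch does not configure.
  private static final String BASE = "https://localhost:9509/services/search-data-service/v1";

  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();

    // PUT with no resource-version header: processPut() routes to createDocument().
    Response created = client.target(BASE)
        .path("indexes/my-index/documents/doc-1")
        .request(MediaType.APPLICATION_JSON)
        .put(Entity.json("{\"firstName\":\"Bob\"}"));

    // PUT carrying the version returned by the create: processPut() routes
    // to updateDocument(). Both header names here are assumptions.
    client.target(BASE)
        .path("indexes/my-index/documents/doc-1")
        .request(MediaType.APPLICATION_JSON)
        .header("If-Match", created.getHeaderString("ETag"))
        .put(Entity.json("{\"firstName\":\"Bob\",\"lastName\":\"Smith\"}"));

    client.close();
  }
}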
+
+  public Response processDelete(String content, HttpServletRequest request, HttpHeaders headers,
+                                HttpServletResponse httpResponse, String index, String id,
+                                DocumentStoreInterface documentStore) {
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    try {
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.setSerializationInclusion(Include.NON_EMPTY);
+      boolean isValid;
+      try {
+        isValid = searchService.validateRequest(headers, request, ApiUtils.Action.DELETE,
+            ApiUtils.SEARCH_AUTH_POLICY_NAME);
+      } catch (Exception e) {
+        logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
+            "DocumentApi.processDelete",
+            e.getMessage());
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      if (!isValid) {
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      String resourceVersion = headers.getRequestHeaders()
+          .getFirst(REQUEST_HEADER_RESOURCE_VERSION);
+      if (resourceVersion == null || resourceVersion.isEmpty()) {
+        return handleError(request, "Request header 'If-Match' missing",
+            javax.ws.rs.core.Response.Status.BAD_REQUEST);
+      }
+
+      DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl();
+      document.setId(id);
+      document.setVersion(resourceVersion);
+
+      DocumentOperationResult result = documentStore.deleteDocument(index, document);
+      String output = null;
+      if (!(result.getResultCode() >= 200 && result.getResultCode() <= 299)) {
+        output = result.getError() != null
+            ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
+            : result.getFailureCause();
+      }
+
+      if (httpResponse != null) {
+        httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion());
+      }
+      Response response;
+      if (output == null) {
+        response = Response.status(result.getResultCode()).build();
+      } else {
+        response = Response.status(result.getResultCode()).entity(output).build();
+      }
+
+      logResult(request, Response.Status.fromStatusCode(response.getStatus()));
+
+      // Clear the MDC context so that no other transaction inadvertently
+      // uses our transaction id.
+      ApiUtils.clearMdcContext();
+
+      return response;
+    } catch (Exception e) {
+      return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  public Response processGet(String content, HttpServletRequest request, HttpHeaders headers,
+                             HttpServletResponse httpResponse, String index, String id,
+                             DocumentStoreInterface documentStore) {
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    try {
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.setSerializationInclusion(Include.NON_EMPTY);
+      boolean isValid;
+      try {
+        isValid = searchService.validateRequest(headers, request, ApiUtils.Action.GET,
+            ApiUtils.SEARCH_AUTH_POLICY_NAME);
+      } catch (Exception e) {
+        logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
+            "DocumentApi.processGet",
+            e.getMessage());
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      if (!isValid) {
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      String resourceVersion = headers.getRequestHeaders()
+          .getFirst(REQUEST_HEADER_RESOURCE_VERSION);
+
+      DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl();
+      document.setId(id);
+      document.setVersion(resourceVersion);
+
+      DocumentOperationResult result = documentStore.getDocument(index, document);
+      String output = null;
+      if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
+        output = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getDocument());
+      } else {
+        output = result.getError() != null
+            ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
+            : result.getFailureCause();
+      }
+      if (httpResponse != null) {
+        httpResponse.setHeader(RESPONSE_HEADER_RESOURCE_VERSION, result.getResultVersion());
+      }
+      Response response = Response.status(result.getResultCode()).entity(output).build();
+      logResult(request, Response.Status.fromStatusCode(response.getStatus()));
+
+      // Clear the MDC context so that no other transaction inadvertently
+      // uses our transaction id.
+      ApiUtils.clearMdcContext();
+
+      return response;
+    } catch (Exception e) {
+      return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  public Response processSearchWithGet(String content, HttpServletRequest request,
+                                       HttpHeaders headers, String index,
+                                       String queryText, DocumentStoreInterface documentStore) {
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    try {
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.setSerializationInclusion(Include.NON_EMPTY);
+
+      boolean isValid;
+      try {
+        isValid = searchService.validateRequest(headers, request, ApiUtils.Action.GET,
+            ApiUtils.SEARCH_AUTH_POLICY_NAME);
+      } catch (Exception e) {
+        logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
+            "processSearchWithGet",
+            e.getMessage());
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      if (!isValid) {
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      SearchOperationResult result = documentStore.search(index, queryText);
+      String output = null;
+      if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
+        output = mapper.writerWithDefaultPrettyPrinter()
+            .writeValueAsString(result.getSearchResult());
+      } else {
+        output = result.getError() != null
+            ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
+            : result.getFailureCause();
+      }
+      Response response = Response.status(result.getResultCode()).entity(output).build();
+
+      // Clear the MDC context so that no other transaction inadvertently
+      // uses our transaction id.
+      ApiUtils.clearMdcContext();
+
+      return response;
+    } catch (Exception e) {
+      return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  public Response queryWithGetWithPayload(String content, HttpServletRequest request,
+                                          HttpHeaders headers, String index,
+                                          DocumentStoreInterface documentStore) {
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    logger.info(SearchDbMsgs.PROCESS_PAYLOAD_QUERY, "GET", (request != null)
+        ? request.getRequestURL().toString() : "");
+    if (logger.isDebugEnabled()) {
+      logger.debug("Request Body: " + content);
+    }
+    return processQuery(index, content, request, headers, documentStore);
+  }
+
+  public Response processSearchWithPost(String content, HttpServletRequest request,
+                                        HttpHeaders headers, String index,
+                                        DocumentStoreInterface documentStore) {
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    logger.info(SearchDbMsgs.PROCESS_PAYLOAD_QUERY, "POST", (request != null)
+        ? request.getRequestURL().toString() : "");
+    if (logger.isDebugEnabled()) {
+      logger.debug("Request Body: " + content);
+    }
+
+    return processQuery(index, content, request, headers, documentStore);
+  }
+
+  /**
+   * Common handler for query requests. This is called by both the GET with
+   * payload and POST with payload variants of the query endpoint.
+   *
+   * @param index   - The index to be queried against.
+   * @param content - The payload containing the query structure.
+   * @param request - The HTTP request.
+   * @param headers - The HTTP headers.
+   * @param documentStore - The DAO used to interact with the document store.
+   * @return - A standard HTTP response.
+   */
+  private Response processQuery(String index, String content, HttpServletRequest request,
+                                HttpHeaders headers, DocumentStoreInterface documentStore) {
+
+    try {
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.setSerializationInclusion(Include.NON_EMPTY);
+
+      // Make sure that we were supplied a payload before proceeding.
+      if (content == null) {
+        return handleError(request, content, Status.BAD_REQUEST);
+      }
+
+      // Validate that the request has the appropriate authorization.
+      boolean isValid;
+      try {
+        isValid = searchService.validateRequest(headers, request, ApiUtils.Action.POST,
+            ApiUtils.SEARCH_AUTH_POLICY_NAME);
+
+      } catch (Exception e) {
+        logger.info(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
+            "processQuery",
+            e.getMessage());
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      if (!isValid) {
+        return handleError(request, content, Status.FORBIDDEN);
+      }
+
+      SearchStatement searchStatement;
+
+      try {
+        // Marshall the supplied request payload into a search statement
+        // object.
+        searchStatement = mapper.readValue(content, SearchStatement.class);
+
+      } catch (Exception e) {
+        return handleError(request, e.getMessage(), Status.BAD_REQUEST);
+      }
+
+      // Now, submit the search statement, translated into
+      // ElasticSearch syntax, to the document store DAO.
+      SearchOperationResult result = documentStore.searchWithPayload(index,
+          searchStatement.toElasticSearch());
+      String output = null;
+      if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
+        output = prepareOutput(mapper, result);
+      } else {
+        output = result.getError() != null
+            ? mapper.writerWithDefaultPrettyPrinter().writeValueAsString(result.getError())
+            : result.getFailureCause();
+      }
+      Response response = Response.status(result.getResultCode()).entity(output).build();
+
+      // Clear the MDC context so that no other transaction inadvertently
+      // uses our transaction id.
+      ApiUtils.clearMdcContext();
+
+      return response;
+
+    } catch (Exception e) {
+      return handleError(request, e.getMessage(), Status.INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  private String prepareOutput(ObjectMapper mapper, SearchOperationResult result)
+      throws JsonProcessingException {
+    StringBuilder output = new StringBuilder();
+    output.append("{\r\n\"searchResult\":");
+    output.append(mapper.writerWithDefaultPrettyPrinter()
+        .writeValueAsString(result.getSearchResult()));
+    AggregationResults aggs = result.getAggregationResult();
+    if (aggs != null) {
+      output.append(",\r\n\"aggregationResult\":");
+      output.append(mapper.setSerializationInclusion(Include.NON_NULL)
+          .writerWithDefaultPrettyPrinter().writeValueAsString(aggs));
+    }
+    output.append("\r\n}");
+    return output.toString();
+  }
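
prepareOutput assembles the response envelope by hand rather than through a wrapper bean, appending an aggregationResult member only when aggregations are present. A self-contained sketch of the same idiom, with placeholder maps standing in for the project's search-hit and aggregation types:

import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collections;
import java.util.Map;

public class EnvelopeSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    Map<String, Object> hits = Collections.singletonMap("totalHits", 1);
    Map<String, Object> aggs = Collections.singletonMap("groupBy", "entityType");

    // Build the same "{searchResult, aggregationResult}" envelope as above.
    StringBuilder output = new StringBuilder();
    output.append("{\r\n\"searchResult\":");
    output.append(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(hits));
    if (aggs != null) {
      output.append(",\r\n\"aggregationResult\":");
      output.append(mapper.setSerializationInclusion(Include.NON_NULL)
          .writerWithDefaultPrettyPrinter().writeValueAsString(aggs));
    }
    output.append("\r\n}");
    System.out.println(output);
  }
}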
+
+  private Response handleError(HttpServletRequest request, String message, Status status) {
+    logResult(request, status);
+    return Response.status(status).entity(message).type(MediaType.APPLICATION_JSON).build();
+  }
+
+  void logResult(HttpServletRequest request, Response.Status status) {
+
+    logger.info(SearchDbMsgs.PROCESS_REST_REQUEST, (request != null) ? request.getMethod() : "",
+        (request != null) ? request.getRequestURL().toString() : "",
+        (request != null) ? request.getRemoteHost() : "", Integer.toString(status.getStatusCode()));
+
+    auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, status.getStatusCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, status.getReasonPhrase()),
+        (request != null) ? request.getMethod() : "",
+        (request != null) ? request.getRequestURL().toString() : "",
+        (request != null) ? request.getRemoteHost() : "", Integer.toString(status.getStatusCode()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+    ApiUtils.clearMdcContext();
+  }
+}
diff --git a/src/main/java/org/openecomp/sa/rest/IndexApi.java b/src/main/java/org/openecomp/sa/rest/IndexApi.java
new file mode 100644 (file)
index 0000000..2af2f72
--- /dev/null
@@ -0,0 +1,378 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.openecomp.cl.api.LogFields;
+import org.openecomp.cl.api.LogLine;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
+import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
+import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
+
+import java.io.IOException;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+
+
+/**
+ * This class encapsulates the REST end points associated with manipulating
+ * indexes in the document store.
+ */
+public class IndexApi {
+
+  protected SearchServiceApi searchService = null;
+
+  /**
+   * Configuration for the custom analyzers that will be used for indexing.
+   */
+  protected AnalysisConfiguration analysisConfig;
+
+  // Set up the loggers.
+  private static Logger logger = LoggerFactory.getInstance()
+      .getLogger(IndexApi.class.getName());
+  private static Logger auditLogger = LoggerFactory.getInstance()
+      .getAuditLogger(IndexApi.class.getName());
+
+
+  public IndexApi(SearchServiceApi searchService) {
+    this.searchService = searchService;
+    init();
+  }
+
+
+  /**
+   * Initializes the end point.
+   */
+  public void init() {
+
+    // Instantiate our analysis configuration object.
+    analysisConfig = new AnalysisConfiguration();
+  }
+
+
+  /**
+   * Processes client requests to create a new index and document type in the
+   * document store.
+   *
+   * @param documentSchema - The contents of the request body, expected to be
+   *                       a JSON structure corresponding to the schema
+   *                       defined in document.schema.json.
+   * @param index          - The name of the index to create.
+   * @return - A Standard REST response
+   */
+  public Response processCreateIndex(String documentSchema,
+                                     HttpServletRequest request,
+                                     HttpHeaders headers,
+                                     String index,
+                                     DocumentStoreInterface documentStore) {
+
+    int resultCode = 500;
+    String resultString = "Unexpected error";
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    // Validate that the request is correctly authenticated before going
+    // any further.
+    try {
+
+      if (!searchService.validateRequest(headers, request,
+          ApiUtils.Action.POST, ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
+        logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, "Authentication failure.");
+        return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
+      }
+
+    } catch (Exception e) {
+
+      logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index,
+          "Unexpected authentication failure - cause: " + e.getMessage());
+      return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
+    }
+
+
+    // We expect a payload containing the document schema.  Make sure
+    // it is present.
+    if (documentSchema == null) {
+      logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, "Missing document schema payload");
+      return errorResponse(Response.Status.BAD_REQUEST, "Missing payload", request);
+    }
+
+    try {
+
+      // Marshal the supplied json string into a document schema object.
+      ObjectMapper mapper = new ObjectMapper();
+      DocumentSchema schema = mapper.readValue(documentSchema, DocumentSchema.class);
+
+      // Now, ask the DAO to create the index.
+      OperationResult result = documentStore.createIndex(index, schema);
+
+      // Extract the result code and string from the OperationResult
+      // object so that we can use them to generate a standard REST
+      // response.
+      // Note that we want to return a 201 result code on a successful
+      // create, so if we get back a 200 from the document store,
+      // translate that into a 201.
+      resultCode = (result.getResultCode() == 200) ? 201 : result.getResultCode();
+      resultString = (result.getFailureCause() == null)
+          ? result.getResult() : result.getFailureCause();
+
+    } catch (com.fasterxml.jackson.core.JsonParseException
+        | com.fasterxml.jackson.databind.JsonMappingException e) {
+
+      // We were unable to marshal the supplied json string into a valid
+      // document schema, so return an appropriate error response.
+      resultCode = javax.ws.rs.core.Response.Status.BAD_REQUEST.getStatusCode();
+      resultString = "Malformed schema: " + e.getMessage();
+
+    } catch (IOException e) {
+
+      // We'll treat this is a general internal error.
+      resultCode = javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR.getStatusCode();
+      resultString = "IO Failure: " + e.getMessage();
+    }
+
+    Response response = Response.status(resultCode).entity(resultString).build();
+
+    // Log the result.
+    if ((response.getStatus() >= 200) && (response.getStatus() < 300)) {
+      logger.info(SearchDbMsgs.CREATED_INDEX, index);
+    } else {
+      logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, resultString);
+    }
+
+    // Generate our audit log.
+    auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, resultCode)
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION,
+                Response.Status.fromStatusCode(resultCode).toString()),
+        (request != null) ? request.getMethod() : "Unknown",
+        (request != null) ? request.getRequestURL().toString() : "Unknown",
+        (request != null) ? request.getRemoteHost() : "Unknown",
+        Integer.toString(response.getStatus()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+    ApiUtils.clearMdcContext();
+
+    // Finally, return the response.
+    return response;
+  }
+
+
+  /**
+   * Processes a client request to remove an index from the document store.
+   * Note that this implicitly deletes all documents contained within that index.
+   *
+   * @param index - The index to be deleted.
+   * @return - A standard REST response.
+   */
+  public Response processDelete(String index,
+                                HttpServletRequest request,
+                                HttpHeaders headers,
+                                DocumentStoreInterface documentStore) {
+
+    // Initialize the MDC Context for logging purposes.
+    ApiUtils.initMdcContext(request, headers);
+
+    // Set a default response in case something unexpected goes wrong.
+    Response response = Response.status(javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR)
+        .entity("Unknown")
+        .build();
+
+    // Validate that the request is correctly authenticated before going
+    // any further.
+    try {
+
+      if (!searchService.validateRequest(headers, request, ApiUtils.Action.POST,
+          ApiUtils.SEARCH_AUTH_POLICY_NAME)) {
+        logger.warn(SearchDbMsgs.INDEX_CREATE_FAILURE, index, "Authentication failure.");
+        return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
+      }
+
+    } catch (Exception e) {
+
+      logger.warn(SearchDbMsgs.INDEX_DELETE_FAILURE, index,
+          "Unexpected authentication failure - cause: " + e.getMessage());
+      return errorResponse(Response.Status.FORBIDDEN, "Authentication failure.", request);
+    }
+
+
+    try {
+      // Send the request to the document store.
+      response = responseFromOperationResult(documentStore.deleteIndex(index));
+
+    } catch (DocumentStoreOperationException e) {
+      response = Response.status(javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR)
+          .entity(e.getMessage())
+          .build();
+    }
+
+
+    // Log the result.
+    if ((response.getStatus() >= 200) && (response.getStatus() < 300)) {
+      logger.info(SearchDbMsgs.DELETED_INDEX, index);
+    } else {
+      logger.warn(SearchDbMsgs.INDEX_DELETE_FAILURE, index, (String) response.getEntity());
+    }
+
+    // Generate our audit log.
+    auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, response.getStatus())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION,
+                response.getStatusInfo().getReasonPhrase()),
+        (request != null) ? request.getMethod() : "Unknown",
+        (request != null) ? request.getRequestURL().toString() : "Unknown",
+        (request != null) ? request.getRemoteHost() : "Unknown",
+        Integer.toString(response.getStatus()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+    ApiUtils.clearMdcContext();
+
+    return response;
+  }
+
+
+  /**
+   * This method takes a JSON format document schema and produces a set of
+   * field mappings in the form that Elastic Search expects.
+   *
+   * @param documentSchema - A document schema expressed as a JSON string.
+   * @return - A JSON string expressing an Elastic Search mapping configuration.
+   * @throws com.fasterxml.jackson.core.JsonParseException
+   * @throws com.fasterxml.jackson.databind.JsonMappingException
+   * @throws IOException
+   */
+  public String generateDocumentMappings(String documentSchema)
+      throws com.fasterxml.jackson.core.JsonParseException,
+      com.fasterxml.jackson.databind.JsonMappingException, IOException {
+
+    // Unmarshal the json content into a document schema object.
+    ObjectMapper mapper = new ObjectMapper();
+    DocumentSchema schema = mapper.readValue(documentSchema, DocumentSchema.class);
+
+    // Now, generate the Elastic Search mapping json and return it.
+    StringBuilder sb = new StringBuilder();
+    sb.append("{");
+    sb.append("\"properties\": {");
+
+    boolean first = true;
+    for (DocumentFieldSchema field : schema.getFields()) {
+
+      if (!first) {
+        sb.append(",");
+      } else {
+        first = false;
+      }
+
+      sb.append("\"").append(field.getName()).append("\": {");
+
+      // The field type is mandatory.
+      sb.append("\"type\": \"").append(field.getDataType()).append("\"");
+
+      // If the index field was specified, then append it.
+      if (field.getSearchable() != null) {
+        sb.append(", \"index\": \"").append(field.getSearchable()
+            ? "analyzed" : "not_analyzed").append("\"");
+      }
+
+      // If a search analyzer was specified, then append it.
+      if (field.getSearchAnalyzer() != null) {
+        sb.append(", \"search_analyzer\": \"").append(field.getSearchAnalyzer()).append("\"");
+      }
+
+      // If an indexing analyzer was specified, then append it.
+      if (field.getIndexAnalyzer() != null) {
+        sb.append(", \"analyzer\": \"").append(field.getIndexAnalyzer()).append("\"");
+      } else {
+        sb.append(", \"analyzer\": \"").append("whitespace").append("\"");
+      }
+
+      sb.append("}");
+    }
+
+    sb.append("}");
+    sb.append("}");
+
+    logger.debug("Generated document mappings: " + sb.toString());
+
+    return sb.toString();
+  }
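
As a worked example (field values are hypothetical): a schema whose single field reports getName() = "firstName", getDataType() = "string", getSearchable() = true, and no analyzers produces the following mapping string, reformatted here for readability:

{
  "properties": {
    "firstName": {
      "type": "string",
      "index": "analyzed",
      "analyzer": "whitespace"
    }
  }
}

Note that "whitespace" is applied as the indexing analyzer whenever the schema does not name one.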
+
+
+  /**
+   * Converts an {@link OperationResult} to a standard REST {@link Response}
+   * object.
+   *
+   * @param result - The {@link OperationResult} to be converted.
+   * @return - The equivalent {@link Response} object.
+   */
+  public Response responseFromOperationResult(OperationResult result) {
+
+    if ((result.getResultCode() >= 200) && (result.getResultCode() < 300)) {
+      return Response.status(result.getResultCode()).entity(result.getResult()).build();
+    }
+
+    if (result.getFailureCause() != null) {
+      return Response.status(result.getResultCode()).entity(result.getFailureCause()).build();
+    }
+
+    return Response.status(result.getResultCode()).entity(result.getResult()).build();
+  }
+
+  public Response errorResponse(Response.Status status, String msg, HttpServletRequest request) {
+
+    // Generate our audit log.
+    auditLogger.info(SearchDbMsgs.PROCESS_REST_REQUEST,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, status.getStatusCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, status.getReasonPhrase()),
+        (request != null) ? request.getMethod() : "Unknown",
+        (request != null) ? request.getRequestURL().toString() : "Unknown",
+        (request != null) ? request.getRemoteHost() : "Unknown",
+        Integer.toString(status.getStatusCode()));
+
+    // Clear the MDC context so that no other transaction inadvertently
+    // uses our transaction id.
+    ApiUtils.clearMdcContext();
+
+    return Response.status(status)
+        .entity(msg)
+        .build();
+  }
+
+
+}
diff --git a/src/main/java/org/openecomp/sa/rest/SearchServiceApi.java b/src/main/java/org/openecomp/sa/rest/SearchServiceApi.java
new file mode 100644 (file)
index 0000000..f2ad6db
--- /dev/null
@@ -0,0 +1,259 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.openecomp.sa.auth.SearchDbServiceAuth;
+import org.openecomp.sa.rest.ApiUtils.Action;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.ElasticSearchHttpController;
+
+import java.security.cert.X509Certificate;
+import javax.security.auth.x500.X500Principal;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+
+public class SearchServiceApi {
+
+  /**
+   * The Data Access Object that we will use to interact with the
+   * document store.
+   */
+  protected DocumentStoreInterface documentStore = null;
+  protected ApiUtils apiUtils = null;
+
+
+  /**
+   * Create a new instance of the end point.
+   */
+  public SearchServiceApi() {
+
+    // Perform one-time initialization.
+    init();
+  }
+
+
+  /**
+   * Performs all one-time initialization required for the end point.
+   */
+  public void init() {
+
+    // Instantiate our Document Store DAO.
+    documentStore = ElasticSearchHttpController.getInstance();
+
+    apiUtils = new ApiUtils();
+  }
+
+  @PUT
+  @Path("/indexes/{index}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processCreateIndex(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index) {
+
+    // Forward the request to our index API to create the index.
+    IndexApi indexApi = new IndexApi(this);
+    return indexApi.processCreateIndex(requestBody, request, headers, index, documentStore);
+  }
+
+
+  @DELETE
+  @Path("/indexes/{index}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processDeleteIndex(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index) {
+
+    // Forward the request to our index API to delete the index.
+    IndexApi indexApi = new IndexApi(this);
+    return indexApi.processDelete(index, request, headers, documentStore);
+  }
+
+
+  @GET
+  @Path("/indexes/{index}/documents/{id}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processGetDocument(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpServletResponse httpResponse,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index,
+                                     @PathParam("id") String id) {
+
+    // Forward the request to our document API to retrieve the document.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processGet(requestBody, request, headers, httpResponse,
+        index, id, documentStore);
+  }
+
+  @POST
+  @Path("/indexes/{index}/documents")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processCreateDocWithoutId(String requestBody,
+                                            @Context HttpServletRequest request,
+                                            @Context HttpServletResponse httpResponse,
+                                            @Context HttpHeaders headers,
+                                            @PathParam("index") String index) {
+
+    // Forward the request to our document API to create the document.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processPost(requestBody, request, headers, httpResponse,
+        index, documentStore);
+  }
+
+  @PUT
+  @Path("/indexes/{index}/documents/{id}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processUpsertDoc(String requestBody,
+                                   @Context HttpServletRequest request,
+                                   @Context HttpServletResponse httpResponse,
+                                   @Context HttpHeaders headers,
+                                   @PathParam("index") String index,
+                                   @PathParam("id") String id) {
+
+    // Forward the request to our document API to upsert the document.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processPut(requestBody, request, headers, httpResponse,
+        index, id, documentStore);
+  }
+
+  @DELETE
+  @Path("/indexes/{index}/documents/{id}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processDeleteDoc(String requestBody,
+                                   @Context HttpServletRequest request,
+                                   @Context HttpServletResponse httpResponse,
+                                   @Context HttpHeaders headers,
+                                   @PathParam("index") String index,
+                                   @PathParam("id") String id) {
+
+    // Forward the request to our document API to delete the document.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processDelete(requestBody, request, headers, httpResponse,
+        index, id, documentStore);
+  }
+
+
+  @GET
+  @Path("/indexes/{index}/query/{queryText}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processInlineQuery(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index,
+                                     @PathParam("queryText") String queryText) {
+
+    // Forward the request to our document API to execute the query.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processSearchWithGet(requestBody, request, headers,
+        index, queryText, documentStore);
+  }
+
+
+  @GET
+  @Path("/indexes/{index}/query")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processQueryWithGet(String requestBody,
+                                      @Context HttpServletRequest request,
+                                      @Context HttpHeaders headers,
+                                      @PathParam("index") String index) {
+
+    // Forward the request to our document API to execute the query.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.queryWithGetWithPayload(requestBody, request, headers, index, documentStore);
+  }
+
+  @POST
+  @Path("/indexes/{index}/query")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processQuery(String requestBody,
+                               @Context HttpServletRequest request,
+                               @Context HttpHeaders headers,
+                               @PathParam("index") String index) {
+
+    // Forward the request to our document API to execute the query.
+    DocumentApi documentApi = new DocumentApi(this);
+    return documentApi.processSearchWithPost(requestBody, request, headers, index, documentStore);
+  }
+
+
+  @POST
+  @Path("/bulk")
+  @Consumes({MediaType.APPLICATION_JSON})
+  public Response processBulkRequest(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers) {
+
+    // Forward the request to our bulk API to process the operations.
+    BulkApi bulkApi = new BulkApi(this);
+    return bulkApi.processPost(requestBody, request, headers, documentStore, apiUtils);
+  }
+
+  protected boolean validateRequest(HttpHeaders headers,
+                                    HttpServletRequest req,
+                                    Action action,
+                                    String authPolicyFunctionName) throws Exception {
+
+    SearchDbServiceAuth serviceAuth = new SearchDbServiceAuth();
+
+    String cipherSuite = (String) req.getAttribute("javax.servlet.request.cipher_suite");
+    String authUser = null;
+    if (cipherSuite != null) {
+      Object x509CertAttribute = req.getAttribute("javax.servlet.request.X509Certificate");
+      if (x509CertAttribute != null) {
+        X509Certificate[] certChain = (X509Certificate[]) x509CertAttribute;
+        X509Certificate clientCert = certChain[0];
+        X500Principal subjectDn = clientCert.getSubjectX500Principal();
+        authUser = subjectDn.toString();
+      }
+    }
+
+    if (authUser == null) {
+      return false;
+    }
+
+    String status = serviceAuth.authUser(headers, authUser.toLowerCase(),
+        action.toString() + ":" + authPolicyFunctionName);
+
+    return "OK".equals(status);
+  }
+}
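
validateRequest only authenticates when the container has populated both javax.servlet.request.cipher_suite and javax.servlet.request.X509Certificate, and it authorizes against the lower-cased subject DN of the first certificate in the chain. A minimal sketch of the same extraction (the attribute names are the standard Servlet API keys; everything else is illustrative):

import java.security.cert.X509Certificate;
import javax.servlet.http.HttpServletRequest;

public final class CertAuthSketch {

  /**
   * Returns the client's subject DN, or null if the request is not
   * mutually authenticated. Mirrors the logic in validateRequest().
   */
  static String clientSubjectDn(HttpServletRequest req) {
    if (req.getAttribute("javax.servlet.request.cipher_suite") == null) {
      return null; // not a TLS connection
    }
    X509Certificate[] chain =
        (X509Certificate[]) req.getAttribute("javax.servlet.request.X509Certificate");
    if (chain == null || chain.length == 0) {
      return null; // no client certificate presented
    }
    // The first entry in the chain is the client's own certificate.
    return chain[0].getSubjectX500Principal().toString().toLowerCase();
  }
}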
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsEchoService.java b/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsEchoService.java
new file mode 100644 (file)
index 0000000..0dc25c2
--- /dev/null
@@ -0,0 +1,52 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+
+
+/**
+ * Exposes REST endpoints for a simple echo service.
+ */
+@Path("/jaxrs-services")
+public class JaxrsEchoService {
+
+  /**
+   * REST endpoint for a simple echo service.
+   *
+   * @param input - The value to be echoed back.
+   * @return - The input value.
+   */
+  @GET
+  @Path("/echo/{input}")
+  @Produces("text/plain")
+  public String ping(@PathParam("input") String input) {
+    return "[Search Database Abstraction Micro Service] - Echo Service: " + input + ".";
+  }
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsUserService.java b/src/main/java/org/openecomp/sa/searchdbabstraction/JaxrsUserService.java
new file mode 100644 (file)
index 0000000..1194b75
--- /dev/null
@@ -0,0 +1,53 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction;
+
+import java.util.HashMap;
+import java.util.Map;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+
+@Path("/user")
+public class JaxrsUserService {
+
+  private static final Map<String, String> userIdToNameMap;
+
+  static {
+    userIdToNameMap = new HashMap<>();
+    userIdToNameMap.put("dw113c", "Doug Wait");
+    userIdToNameMap.put("so401q", "Stuart O'Day");
+  }
+
+  @GET
+  @Path("/{userId}")
+  @Produces("text/plain")
+  public String lookupUser(@PathParam("userId") String userId) {
+    String name = userIdToNameMap.get(userId);
+    return name != null ? name : "unknown id";
+  }
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/config/ElasticSearchConfig.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/config/ElasticSearchConfig.java
new file mode 100644 (file)
index 0000000..6268b4c
--- /dev/null
@@ -0,0 +1,87 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.config;
+
+import java.util.Properties;
+
+public class ElasticSearchConfig {
+  private String ipAddress;
+  private String httpPort;
+  private String javaApiPort;
+  private String clusterName;
+
+  public static final String ES_CLUSTER_NAME = "es.cluster-name";
+  public static final String ES_IP_ADDRESS = "es.ip-address";
+  public static final String ES_HTTP_PORT = "es.http-port";
+
+  private static final String JAVA_API_PORT_DEFAULT = "9300";
+
+  public ElasticSearchConfig(Properties props) {
+
+    setClusterName(props.getProperty(ES_CLUSTER_NAME));
+    setIpAddress(props.getProperty(ES_IP_ADDRESS));
+    setHttpPort(props.getProperty(ES_HTTP_PORT));
+    setJavaApiPort(JAVA_API_PORT_DEFAULT);
+  }
+
+  public String getIpAddress() {
+    return ipAddress;
+  }
+
+  public void setIpAddress(String ipAddress) {
+    this.ipAddress = ipAddress;
+  }
+
+  public String getHttpPort() {
+    return httpPort;
+  }
+
+  public void setHttpPort(String httpPort) {
+    this.httpPort = httpPort;
+  }
+
+  public String getJavaApiPort() {
+    return javaApiPort;
+  }
+
+  public void setJavaApiPort(String javaApiPort) {
+    this.javaApiPort = javaApiPort;
+  }
+
+  public String getClusterName() {
+    return clusterName;
+  }
+
+  public void setClusterName(String clusterName) {
+    this.clusterName = clusterName;
+  }
+
+  @Override
+  public String toString() {
+    return "ElasticSearchConfig [ipAddress=" + ipAddress + ", httpPort=" + httpPort
+        + ", javaApiPort=" + javaApiPort + ", clusterName=" + clusterName + "]";
+  }
+
+}
\ No newline at end of file
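
A short usage sketch for the configuration class above (property values are placeholders):

import java.util.Properties;
import org.openecomp.sa.searchdbabstraction.elasticsearch.config.ElasticSearchConfig;

public class ElasticSearchConfigSketch {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty(ElasticSearchConfig.ES_CLUSTER_NAME, "searchdb");
    props.setProperty(ElasticSearchConfig.ES_IP_ADDRESS, "127.0.0.1");
    props.setProperty(ElasticSearchConfig.ES_HTTP_PORT, "9200");

    ElasticSearchConfig config = new ElasticSearchConfig(props);

    // The Java API port cannot be set through properties: the constructor
    // always applies the hard-coded default of "9300".
    System.out.println(config);
  }
}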
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntity.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntity.java
new file mode 100644 (file)
index 0000000..84f6522
--- /dev/null
@@ -0,0 +1,35 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+public interface DocumentStoreDataEntity {
+
+  public String getId();
+
+  public String getContentInJson();
+
+  public String getVersion();
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntityImpl.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreDataEntityImpl.java
new file mode 100644 (file)
index 0000000..f7b7a45
--- /dev/null
@@ -0,0 +1,64 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+public class DocumentStoreDataEntityImpl implements DocumentStoreDataEntity {
+
+  private String id;
+  private String content;
+  private String version;
+
+  public String getContent() {
+    return content;
+  }
+
+  public void setContent(String content) {
+    this.content = content;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public String getContentInJson() {
+    return content;
+  }
+
+  @Override
+  public String getVersion() {
+    return version;
+  }
+
+  public void setVersion(String version) {
+    this.version = version;
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreInterface.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/DocumentStoreInterface.java
new file mode 100644 (file)
index 0000000..a396516
--- /dev/null
@@ -0,0 +1,73 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+
+import org.openecomp.sa.rest.BulkRequest;
+import org.openecomp.sa.rest.DocumentSchema;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
+import org.openecomp.sa.searchdbabstraction.entity.DocumentOperationResult;
+import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
+import org.openecomp.sa.searchdbabstraction.entity.SearchOperationResult;
+
+
+public interface DocumentStoreInterface {
+
+  public OperationResult createIndex(String index, DocumentSchema documentSchema);
+
+  public OperationResult deleteIndex(String indexName) throws DocumentStoreOperationException;
+
+  public DocumentOperationResult createDocument(String indexName, DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException;
+
+  public DocumentOperationResult updateDocument(String indexName, DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException;
+
+  public DocumentOperationResult deleteDocument(String indexName, DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException;
+
+  public DocumentOperationResult getDocument(String indexName, DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException;
+
+  public SearchOperationResult search(String indexName, String queryText)
+      throws DocumentStoreOperationException;
+
+  public SearchOperationResult searchWithPayload(String indexName, String query)
+      throws DocumentStoreOperationException;
+
+
+  /**
+   * Forwards a set of operations to the document store as a single, bulk
+   * request.
+   *
+   * @param request - The set of operations to be performed, expressed as an
+   *                  array of bulk request objects.
+   * @return - An operation result.
+   * @throws DocumentStoreOperationException
+   */
+  public OperationResult performBulkOperations(BulkRequest[] request)
+      throws DocumentStoreOperationException;
+}
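
A minimal sketch of driving the document store through this interface, as DocumentApi does above. ElasticSearchHttpController is the concrete implementation wired in by SearchServiceApi.init(); the index name and document body are placeholders, and getId() on the result document is an assumption:

import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreDataEntityImpl;
import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.ElasticSearchHttpController;
import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
import org.openecomp.sa.searchdbabstraction.entity.DocumentOperationResult;

public class DocumentStoreSketch {
  public static void main(String[] args) throws DocumentStoreOperationException {
    DocumentStoreInterface store = ElasticSearchHttpController.getInstance();

    DocumentStoreDataEntityImpl document = new DocumentStoreDataEntityImpl();
    document.setContent("{\"firstName\":\"Bob\"}");

    // Create, then read the document back using the id assigned on create.
    DocumentOperationResult created = store.createDocument("my-index", document);
    if (created.getResultCode() >= 200 && created.getResultCode() <= 299) {
      DocumentStoreDataEntityImpl lookup = new DocumentStoreDataEntityImpl();
      lookup.setId(created.getDocument().getId()); // getId() here is assumed
      store.getDocument("my-index", lookup);
    }
  }
}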
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchBulkOperationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchBulkOperationResult.java
new file mode 100644 (file)
index 0000000..2daa430
--- /dev/null
@@ -0,0 +1,70 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+import java.util.Arrays;
+
+public class ElasticSearchBulkOperationResult {
+
+  private Integer took;
+  private Boolean errors;
+  private ElasticSearchResultItem[] items;
+
+  public ElasticSearchBulkOperationResult() {
+  }
+
+  public ElasticSearchResultItem[] getItems() {
+    return items;
+  }
+
+  public void setItems(ElasticSearchResultItem[] items) {
+    this.items = items;
+  }
+
+  public Integer getTook() {
+    return took;
+  }
+
+  public void setTook(Integer took) {
+    this.took = took;
+  }
+
+  public Boolean getErrors() {
+    return errors;
+  }
+
+  public void setErrors(Boolean errors) {
+    this.errors = errors;
+  }
+
+  @Override
+  public String toString() {
+    return "ElasticSearchOperationResult [took=" + took + ", errors="
+        + errors + ", items=" + Arrays.toString(items) + "]";
+  }
+
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchCause.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchCause.java
new file mode 100644 (file)
index 0000000..c69552d
--- /dev/null
@@ -0,0 +1,47 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+public class ElasticSearchCause {
+
+  private String type;
+  private String reason;
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public String getReason() {
+    return reason;
+  }
+
+  public void setReason(String reason) {
+    this.reason = reason;
+  }
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchError.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchError.java
new file mode 100644 (file)
index 0000000..0066c94
--- /dev/null
@@ -0,0 +1,75 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ElasticSearchError {
+
+  private String type;
+  private String reason;
+  private ElasticSearchCause causedBy;
+
+  private Map<String, Object> additionalProperties = new HashMap<String, Object>();
+
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public String getReason() {
+    return reason;
+  }
+
+  public void setReason(String reason) {
+    this.reason = reason;
+  }
+
+  public ElasticSearchCause getCausedBy() {
+    return causedBy;
+  }
+
+  public void setCausedBy(ElasticSearchCause causedBy) {
+    this.causedBy = causedBy;
+  }
+
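+  // Jackson routes any fields of an Elasticsearch error payload that do not
+  // map to the properties above through the any-getter/any-setter pair below,
+  // so unrecognized fields are preserved rather than dropped.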
+  @JsonAnyGetter
+  public Map<String, Object> getAdditionalProperties() {
+    return additionalProperties;
+  }
+
+  @JsonAnySetter
+  public void setAdditionalProperties(String name, Object value) {
+    additionalProperties.put(name, value);
+  }
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpController.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpController.java
new file mode 100644 (file)
index 0000000..9ab028e
--- /dev/null
@@ -0,0 +1,1596 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+import com.att.aft.dme2.internal.google.common.base.Throwables;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import edu.emory.mathcs.backport.java.util.Arrays;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.openecomp.cl.api.LogFields;
+import org.openecomp.cl.api.LogLine;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.cl.mdc.MdcContext;
+import org.openecomp.cl.mdc.MdcOverride;
+import org.openecomp.sa.rest.AnalysisConfiguration;
+import org.openecomp.sa.rest.ApiUtils;
+import org.openecomp.sa.rest.BulkRequest;
+import org.openecomp.sa.rest.BulkRequest.OperationType;
+import org.openecomp.sa.rest.DocumentSchema;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.config.ElasticSearchConfig;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
+import org.openecomp.sa.searchdbabstraction.entity.AggregationResult;
+import org.openecomp.sa.searchdbabstraction.entity.AggregationResults;
+import org.openecomp.sa.searchdbabstraction.entity.Document;
+import org.openecomp.sa.searchdbabstraction.entity.DocumentOperationResult;
+import org.openecomp.sa.searchdbabstraction.entity.ErrorResult;
+import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
+import org.openecomp.sa.searchdbabstraction.entity.SearchHit;
+import org.openecomp.sa.searchdbabstraction.entity.SearchHits;
+import org.openecomp.sa.searchdbabstraction.entity.SearchOperationResult;
+import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
+import org.openecomp.sa.searchdbabstraction.util.AggregationParsingUtil;
+import org.openecomp.sa.searchdbabstraction.util.DocumentSchemaUtil;
+import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.ProtocolException;
+import java.net.URL;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+import java.util.concurrent.atomic.AtomicBoolean;
+import javax.ws.rs.core.Response.Status;
+
+
+/**
+ * This class provides the Elasticsearch implementation of the document
+ * store operations defined in {@link DocumentStoreInterface}.
+ */
+public class ElasticSearchHttpController implements DocumentStoreInterface {
+
+  private static final String BULK_CREATE_WITHOUT_INDEX_TEMPLATE =
+      "{\"create\":{\"_index\" : \"%s\", \"_type\" : \"%s\"} }\n";
+  private static final String BULK_CREATE_WITH_INDEX_TEMPLATE =
+      "{\"create\":{\"_index\" : \"%s\", \"_type\" : \"%s\", \"_id\" : \"%s\" } }\n";
+  private static final String BULK_IMPORT_INDEX_TEMPLATE =
+      "{\"index\":{\"_index\":\"%s\",\"_type\":\"%s\",\"_id\":\"%s\", \"_version\":\"%s\"}}\n";
+  private static final String BULK_DELETE_TEMPLATE =
+      "{ \"delete\": { \"_index\": \"%s\", \"_type\": \"%s\", \"_id\": \"%s\", \"_version\":\"%s\"}}\n";
+
+  private static final String INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT =
+      "Internal Error: ElasticSearch operation fault occurred";
+  private static final Logger logger = LoggerFactory.getInstance()
+      .getLogger(ElasticSearchHttpController.class.getName());
+  private static final Logger metricsLogger = LoggerFactory.getInstance()
+      .getMetricsLogger(ElasticSearchHttpController.class.getName());
+  private final ElasticSearchConfig config;
+
+  private static final String DEFAULT_TYPE = "default";
+
+  private static ElasticSearchHttpController instance = null;
+
+  protected AnalysisConfiguration analysisConfig;
+
+  public static ElasticSearchHttpController getInstance() {
+
+    synchronized (ElasticSearchHttpController.class) {
+
+      if (instance == null) {
+
+        Properties properties = new Properties();
+        File file = new File(SearchDbConstants.ES_CONFIG_FILE);
+        try {
+          properties.load(new FileInputStream(file));
+        } catch (Exception e) {
+          logger.error(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL,
+            "ElasticSearchHTTPController.getInstance",
+            e.getLocalizedMessage());
+        }
+
+        ElasticSearchConfig config = new ElasticSearchConfig(properties);
+        instance = new ElasticSearchHttpController(config);
+      }
+    }
+
+    return instance;
+  }
+
+  public ElasticSearchHttpController(ElasticSearchConfig config) {
+    this.config = config;
+    analysisConfig = new AnalysisConfiguration();
+
+    try {
+      logger.info(SearchDbMsgs.ELASTIC_SEARCH_CONNECTION_ATTEMPT, getFullUrl("", false));
+      checkConnection();
+      logger.info(SearchDbMsgs.ELASTIC_SEARCH_CONNECTION_SUCCESS, getFullUrl("", false));
+    } catch (Exception e) {
+      logger.error(SearchDbMsgs.ELASTIC_SEARCH_CONNECTION_FAILURE, null, e,
+          getFullUrl("", false), e.getMessage());
+    }
+  }
+
+
+  public AnalysisConfiguration getAnalysisConfig() {
+    return analysisConfig;
+  }
+
+  @Override
+  public OperationResult createIndex(String index, DocumentSchema documentSchema) {
+
+    OperationResult result = new OperationResult();
+    result.setResultCode(500);
+
+    try {
+
+      // Submit the request to ElasticSearch to create the index using a
+      // default document type.
+      result = createTable(index,
+          DEFAULT_TYPE,
+          analysisConfig.getEsIndexSettings(),
+          DocumentSchemaUtil.generateDocumentMappings(documentSchema));
+
+      // ElasticSearch will return us a 200 code on success when we
+      // want to report a 201, so translate the result here.
+      result.setResultCode((result.getResultCode() == 200) ? 201 : result.getResultCode());
+      if (isSuccess(result)) {
+        result.setResult("{\"url\": \"" + ApiUtils.buildIndexUri(index) + "\"}");
+        //result.setResult("{\"index\": \"" + index + ", \"type\": \"" + DEFAULT_TYPE + "\"}");
+      }
+
+    } catch (DocumentStoreOperationException e) {
+
+      result.setFailureCause("Document store operation failure.  Cause: " + e.getMessage());
+    }
+
+    return result;
+  }
+
+
+  @Override
+  public OperationResult deleteIndex(String indexName) throws DocumentStoreOperationException {
+
+    // Initialize the operation result with a failure code / fault string.
+    OperationResult opResult = new OperationResult();
+    opResult.setResultCode(500);
+    opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    String fullUrl = getFullUrl("/" + indexName + "/", false);
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    logger.debug("\nSending 'DELETE' request to URL : " + conn.getURL());
+
+    try {
+      conn.setRequestMethod("DELETE");
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to DELETE.", e);
+    }
+
+    handleResponse(conn, opResult);
+
+    // Generate a metrics log so we can track how long the operation took.
+    metricsLogger.info(SearchDbMsgs.DELETE_INDEX_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
+        override,
+        indexName);
+
+    shutdownConnection(conn);
+
+    return opResult;
+  }
+
+
+  private OperationResult checkConnection() throws Exception {
+
+    String fullUrl = getFullUrl("/_cluster/health", false);
+    URL url = null;
+    HttpURLConnection conn = null;
+
+    url = new URL(fullUrl);
+    conn = (HttpURLConnection) url.openConnection();
+    conn.setRequestMethod("GET");
+    conn.setDoOutput(true);
+    logger.debug("getClusterHealth(), Sending 'GET' request to URL : " + url);
+
+    int resultCode = conn.getResponseCode();
+    logger.debug("getClusterHealth() response Code : " + resultCode);
+    OperationResult opResult = new OperationResult();
+    opResult.setResultCode(resultCode);
+
+    shutdownConnection(conn);
+
+    return opResult;
+  }
+
+  private String getFullUrl(String resourceUrl, boolean isSecure) {
+
+    final String host = config.getIpAddress();
+    final String port = config.getHttpPort();
+
+    if (isSecure) {
+      return String.format("https://%s:%s%s", host, port, resourceUrl);
+    } else {
+      return String.format("http://%s:%s%s", host, port, resourceUrl);
+    }
+  }
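+
+  // For example (hypothetical config values), getFullUrl("/myindex/_search", false)
+  // yields "http://localhost:9200/myindex/_search".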
+
+  private void shutdownConnection(HttpURLConnection connection) {
+    if (connection == null) {
+      return;
+    }
+
+    InputStream inputstream = null;
+    OutputStream outputstream = null;
+
+    try {
+      inputstream = connection.getInputStream();
+    } catch (IOException e) {
+      logger.error(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, "shutdownConnection", e.getLocalizedMessage());
+    } finally {
+      if (inputstream != null) {
+        try {
+          inputstream.close();
+        } catch (IOException e) {
+          logger.error(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, "shutdownConnection",
+              e.getLocalizedMessage());
+        }
+      }
+    }
+
+    try {
+      outputstream = connection.getOutputStream();
+    } catch (IOException e) {
+      logger.error(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, "shutdownConnection", e.getLocalizedMessage());
+    } finally {
+      if (outputstream != null) {
+        try {
+          outputstream.close();
+        } catch (IOException e) {
+          logger.error(SearchDbMsgs.EXCEPTION_DURING_METHOD_CALL, "shutdownConnection",
+              e.getLocalizedMessage());
+        }
+      }
+    }
+
+    connection.disconnect();
+  }
+
+  protected OperationResult createTable(String indexName, String typeName,
+                                        String indexSettings, String indexMappings)
+      throws DocumentStoreOperationException {
+
+    if (indexSettings == null) {
+      logger.debug("No settings provided.");
+    }
+
+    if (indexMappings == null) {
+      logger.debug("No mappings provided.");
+    }
+
+    OperationResult opResult = new OperationResult();
+
+    // Initialize the operation result with a failure code / fault string.
+    opResult.setResultCode(500);
+    opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    String fullUrl = getFullUrl("/" + indexName + "/", false);
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    try {
+      conn.setRequestMethod("PUT");
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to PUT.", e);
+    }
+
+    StringBuilder sb = new StringBuilder(128);
+    sb.append("{ \"settings\" : ");
+    sb.append(indexSettings);
+    sb.append(",");
+
+    sb.append("\"mappings\" : {");
+    sb.append("\"" + typeName + "\" :");
+    sb.append(indexMappings);
+    sb.append("}}");
+
+    attachContent(conn, sb.toString());
+
+    logger.debug("\ncreateTable(), Sending 'PUT' request to URL : " + conn.getURL());
+    logger.debug("Request content: " + sb.toString());
+
+    handleResponse(conn, opResult);
+
+    shutdownConnection(conn);
+
+    // Generate a metrics log so we can track how long the operation took.
+    metricsLogger.info(SearchDbMsgs.CREATE_INDEX_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
+        override,
+        indexName);
+
+    return opResult;
+  }
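+
+  // The request body assembled above has this general shape (hypothetical
+  // settings and mappings shown purely for illustration):
+  //   { "settings" : { ...index settings json... },
+  //     "mappings" : { "default" : { ...document mappings json... } } }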
+
+  @Override
+  public DocumentOperationResult createDocument(String indexName, DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException {
+    if (document.getId() == null || document.getId().isEmpty()) {
+      return createDocumentWithoutId(indexName, document);
+    } else {
+      return createDocumentWithId(indexName, document);
+    }
+  }
+
+  private DocumentOperationResult createDocumentWithId(String indexName,
+                                                       DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException {
+    // check if the document already exists
+    DocumentOperationResult opResult = checkDocumentExistence(indexName, document.getId());
+
+
+    if (opResult.getResultCode() != Status.NOT_FOUND.getStatusCode()) {
+      if (opResult.getResultCode() == Status.OK.getStatusCode()) {
+        opResult.setFailureCause("A document with the same id already exists.");
+      } else {
+        opResult.setFailureCause("Failed to verify a document with the specified id does not already exist.");
+      }
+      opResult.setResultCode(Status.CONFLICT.getStatusCode());
+      return opResult;
+    }
+
+    opResult = new DocumentOperationResult();
+    // Initialize the operation result with a failure code / fault string.
+    opResult.setResultCode(500);
+    opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE
+        + "/" + document.getId(), false);
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    try {
+      conn.setRequestMethod("PUT");
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to PUT.", e);
+    }
+
+    attachDocument(conn, document);
+
+    logger.debug("Sending 'PUT' request to: " + conn.getURL());
+
+    handleResponse(conn, opResult);
+    buildDocumentResult(opResult, indexName);
+
+    // Generate a metrics log so we can track how long the operation took.
+    metricsLogger.info(SearchDbMsgs.CREATE_DOCUMENT_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
+        override,
+        indexName);
+
+    shutdownConnection(conn);
+
+    return opResult;
+
+  }
+
+  private DocumentOperationResult createDocumentWithoutId(String indexName,
+                                                          DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException {
+
+    DocumentOperationResult response = new DocumentOperationResult();
+    // Initialize the operation result with a failure code / fault string.
+    response.setResultCode(500);
+    response.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE, false);
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    try {
+      conn.setRequestMethod("POST");
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to POST.", e);
+    }
+
+    attachDocument(conn, document);
+
+    logger.debug("Sending 'POST' request to: " + conn.getURL());
+
+    handleResponse(conn, response);
+    buildDocumentResult(response, indexName);
+
+    // Generate a metrics log so we can track how long the operation took.
+    metricsLogger.info(SearchDbMsgs.CREATE_DOCUMENT_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, response.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, response.getResult()),
+        override,
+        indexName);
+
+    shutdownConnection(conn);
+
+    return response;
+  }
+
+  private void attachDocument(HttpURLConnection conn, DocumentStoreDataEntity doc)
+      throws DocumentStoreOperationException {
+    conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
+    conn.setRequestProperty("Connection", "Close");
+
+    attachContent(conn, doc.getContentInJson());
+  }
+
+  private DocumentOperationResult checkDocumentExistence(String indexName,
+                                                         String docId)
+      throws DocumentStoreOperationException {
+    DocumentOperationResult opResult = new DocumentOperationResult();
+
+    // Initialize the operation result with a failure code / fault string.
+    opResult.setResultCode(500);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + docId, false);
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    try {
+      conn.setRequestMethod("HEAD");
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to HEAD.", e);
+    }
+
+    logger.debug("Sending 'HEAD' request to: " + conn.getURL());
+
+    int resultCode;
+    try {
+      resultCode = conn.getResponseCode();
+    } catch (IOException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to get the response code from the connection.", e);
+    }
+
+    logger.debug("Response Code : " + resultCode);
+
+    opResult.setResultCode(resultCode);
+
+    // Generate a metrics log so we can track how long the operation took.
+    metricsLogger.info(SearchDbMsgs.GET_DOCUMENT_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
+        override,
+        indexName,
+        docId);
+
+    shutdownConnection(conn);
+
+    return opResult;
+  }
+
+  @Override
+  public DocumentOperationResult updateDocument(String indexName, DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException {
+    DocumentOperationResult opResult = new DocumentOperationResult();
+
+    // Initialize the operation result with a failure code / fault string.
+    opResult.setResultCode(500);
+    opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + document.getId()
+        + "?version=" + document.getVersion(), false);
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    try {
+      conn.setRequestMethod("PUT");
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to PUT.", e);
+    }
+
+    attachDocument(conn, document);
+
+    logger.debug("Sending 'PUT' request to: " + conn.getURL());
+
+    handleResponse(conn, opResult);
+    buildDocumentResult(opResult, indexName);
+
+    // Generate a metrics log so we can track how long the operation took.
+    metricsLogger.info(SearchDbMsgs.UPDATE_DOCUMENT_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
+        override,
+        indexName,
+        document.getId());
+
+    shutdownConnection(conn);
+
+    return opResult;
+  }
+
+  @Override
+  public DocumentOperationResult deleteDocument(String indexName, DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException {
+    DocumentOperationResult opResult = new DocumentOperationResult();
+
+    // Initialize the operation result with a failure code / fault string.
+    opResult.setResultCode(500);
+    opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    String fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + document.getId()
+        + "?version=" + document.getVersion(), false);
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    try {
+      conn.setRequestMethod("DELETE");
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to DELETE.", e);
+    }
+
+    logger.debug("\nSending 'DELETE' request to " + conn.getURL());
+
+    handleResponse(conn, opResult);
+    buildDocumentResult(opResult, indexName);
+    // Suppress the etag and url in the delete response, as they are not required.
+    if (opResult.getDocument() != null) {
+      opResult.getDocument().setEtag(null);
+      opResult.getDocument().setUrl(null);
+    }
+
+    // Generate a metrics log so we can track how long the operation took.
+    metricsLogger.info(SearchDbMsgs.DELETE_DOCUMENT_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
+        override,
+        indexName,
+        document.getId());
+
+    shutdownConnection(conn);
+
+    return opResult;
+  }
+
+  @Override
+  public DocumentOperationResult getDocument(String indexName, DocumentStoreDataEntity document)
+      throws DocumentStoreOperationException {
+    DocumentOperationResult opResult = new DocumentOperationResult();
+
+    // Initialize the operation result with a failure code / fault string.
+    opResult.setResultCode(500);
+    opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    String fullUrl = null;
+    if (document.getVersion() == null) {
+      fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + document.getId(), false);
+    } else {
+      fullUrl = getFullUrl("/" + indexName + "/" + DEFAULT_TYPE + "/" + document.getId()
+          + "?version=" + document.getVersion(), false);
+    }
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    logger.debug("\nSending 'GET' request to: " + conn.getURL());
+
+    handleResponse(conn, opResult);
+    buildDocumentResult(opResult, indexName);
+
+    // Generate a metrics log so we can track how long the operation took.
+    metricsLogger.info(SearchDbMsgs.GET_DOCUMENT_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
+        override,
+        indexName,
+        document.getId());
+
+    shutdownConnection(conn);
+
+    return opResult;
+  }
+
+  public SearchOperationResult search(String indexName, String queryString)
+      throws DocumentStoreOperationException {
+    SearchOperationResult opResult = new SearchOperationResult();
+
+    // Initialize the operation result with a failure code / fault string.
+    opResult.setResultCode(500);
+    opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
+
+    String fullUrl = getFullUrl("/" + indexName + "/_search" + "?" + queryString, false);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    try {
+      conn.setRequestMethod("GET");
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to GET.", e);
+    }
+
+    logger.debug("\nsearch(), Sending 'GET' request to URL : " + conn.getURL());
+
+    handleResponse(conn, opResult);
+    buildSearchResult(opResult, indexName);
+
+
+    metricsLogger.info(SearchDbMsgs.QUERY_DOCUMENT_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
+        override,
+        indexName,
+        queryString);
+
+    shutdownConnection(conn);
+
+    return opResult;
+  }
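+
+  // Illustrative usage (hypothetical index and query): search("myindex", "q=name:smith")
+  // issues GET /myindex/_search?q=name:smith against Elasticsearch.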
+
+  public SearchOperationResult searchWithPayload(String indexName, String query)
+      throws DocumentStoreOperationException {
+    SearchOperationResult opResult = new SearchOperationResult();
+
+    if (logger.isDebugEnabled()) {
+      logger.debug("Querying index: " + indexName + " with query string: " + query);
+    }
+
+    // Initialize the operation result with a failure code / fault string.
+    opResult.setResultCode(500);
+    opResult.setResult(INTERNAL_SERVER_ERROR_ELASTIC_SEARCH_OPERATION_FAULT);
+
+    String fullUrl = getFullUrl("/" + indexName + "/_search", false);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    try {
+      conn.setRequestMethod("POST");
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to POST.", e);
+    }
+
+    attachContent(conn, query);
+
+    logger.debug("\nsearch(), Sending 'POST' request to URL : " + conn.getURL());
+    logger.debug("Request body =  Elasticsearch query = " + query);
+
+    handleResponse(conn, opResult);
+    buildSearchResult(opResult, indexName);
+
+    metricsLogger.info(SearchDbMsgs.QUERY_DOCUMENT_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResult()),
+        override,
+        indexName,
+        query);
+
+    shutdownConnection(conn);
+
+    return opResult;
+  }
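+
+  // Illustrative usage (hypothetical query DSL): searchWithPayload("myindex",
+  // "{\"query\":{\"term\":{\"name\":\"smith\"}}}") posts the query body to
+  // /myindex/_search and returns the parsed search result.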
+
+  private void attachContent(HttpURLConnection conn, String content)
+      throws DocumentStoreOperationException {
+    OutputStream outputStream = null;
+    OutputStreamWriter out = null;
+
+    try {
+      outputStream = conn.getOutputStream();
+    } catch (IOException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to get connection output stream.", e);
+    }
+
+    out = new OutputStreamWriter(outputStream);
+
+    try {
+      out.write(content);
+      out.close();
+    } catch (IOException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to write to the output stream.", e);
+    }
+  }
+
+  private HttpURLConnection initializeConnection(String fullUrl)
+      throws DocumentStoreOperationException {
+    URL url = null;
+    HttpURLConnection conn = null;
+
+    try {
+      url = new URL(fullUrl);
+    } catch (MalformedURLException e) {
+      throw new DocumentStoreOperationException("Error building a URL with " + url, e);
+    }
+
+    try {
+      conn = (HttpURLConnection) url.openConnection();
+      conn.setDoOutput(true);
+    } catch (IOException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to open connection to URL " + url, e);
+    }
+
+    return conn;
+  }
+
+  private void handleResponse(HttpURLConnection conn, OperationResult opResult)
+      throws DocumentStoreOperationException {
+    int resultCode = 200;
+
+    try {
+      resultCode = conn.getResponseCode();
+    } catch (IOException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to get the response code from the connection.", e);
+    }
+
+    logger.debug("Response Code : " + resultCode);
+
+    InputStream inputStream = null;
+
+    if (!(resultCode >= 200 && resultCode <= 299)) { // 2xx response indicates success
+      inputStream = conn.getErrorStream();
+    } else {
+      try {
+        inputStream = conn.getInputStream();
+      } catch (IOException e) {
+        shutdownConnection(conn);
+        throw new DocumentStoreOperationException("Failed to get the response input stream.", e);
+      }
+    }
+
+    InputStreamReader inputstreamreader = new InputStreamReader(inputStream);
+    BufferedReader bufferedreader = new BufferedReader(inputstreamreader);
+
+    StringBuilder result = new StringBuilder(128);
+    String string = null;
+
+    try {
+      while ((string = bufferedreader.readLine()) != null) {
+        result.append(string).append("\n");
+      }
+    } catch (IOException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed getting the response body payload.", e);
+    }
+
+    if (resultCode == Status.CONFLICT.getStatusCode()) {
+      opResult.setResultCode(Status.PRECONDITION_FAILED.getStatusCode());
+    } else {
+      opResult.setResultCode(resultCode);
+    }
+    if (logger.isDebugEnabled()) {
+      logger.debug("Raw result string from ElasticSearch = " + result.toString());
+    }
+    opResult.setResult(result.toString());
+    opResult.setResultVersion(extractVersion(result.toString()));
+  }
+
+  private String extractVersion(String result) throws DocumentStoreOperationException {
+
+    JSONParser parser = new JSONParser();
+    String version = "";
+    try {
+      JSONObject root = (JSONObject) parser.parse(result);
+      if (root.get("_version") != null) {
+        version = root.get("_version").toString();
+      }
+
+    } catch (ParseException e) {
+
+      // Not all responses from ElasticSearch include a version, so
+      // if we don't get one back, just return an empty string rather
+      // than trigger a false failure.
+      version = "";
+    }
+    return version;
+  }
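+
+  // For reference, a successful Elasticsearch write response carries the
+  // version in a field like the following (illustrative sample):
+  //   { "_index": "myindex", "_type": "default", "_id": "1", "_version": 3 }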
+
+  /**
+   * This convenience method gets the current system time and stores
+   * it in an attribute in the supplied {@link MdcOverride} object so
+   * that it can be used later by the metrics logger.
+   *
+   * @param override - The {@link MdcOverride} object to update.
+   * @return - The supplied {@link MdcOverride} object.
+   */
+  private MdcOverride getStartTime(MdcOverride override) {
+
+    // Grab the current time...
+    long startTimeInMs = System.currentTimeMillis();
+
+    // ...and add it as an attribute to the supplied MDC Override
+    // object.
+    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
+    override.addAttribute(MdcContext.MDC_START_TIME, formatter.format(startTimeInMs));
+
+    // Return the MdcOverride object that we were passed.
+    // This looks odd, but it allows us to do stuff like:
+    //
+    //    MdcOverride ov = getStartTime(new MdcOverride())
+    //
+    // which is quite handy, but also allows us to pass in an existing
+    // MdcOverride object which already has some attributes set.
+    return override;
+  }
+
+  private boolean isSuccess(OperationResult result) {
+
+    return isSuccessCode(result.getResultCode());
+  }
+
+
+  private boolean isSuccessCode(int statusCode) {
+    return ((statusCode >= 200) && (statusCode < 300));
+  }
+
+
+  @Override
+  public OperationResult performBulkOperations(BulkRequest[] requests)
+      throws DocumentStoreOperationException {
+
+    if (logger.isDebugEnabled()) {
+      String dbgString = "ESController: performBulkOperations - Operations: ";
+
+      for (BulkRequest request : requests) {
+        dbgString += "[" + request.toString() + "] ";
+      }
+
+      logger.debug(dbgString);
+    }
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    // Parse the supplied set of operations.
+    // Iterate over the list of operations which we were provided and
+    // translate them into a format that ElasticSearch understands.
+    int opCount = 0;
+    StringBuilder esOperationSet = new StringBuilder(128);
+    List<ElasticSearchResultItem> rejected = new ArrayList<ElasticSearchResultItem>();
+    for (BulkRequest request : requests) {
+
+      // Convert the request to the syntax ElasticSearch likes.
+      if (buildEsOperation(request, esOperationSet, rejected)) {
+        opCount++;
+      }
+    }
+
+    ElasticSearchBulkOperationResult opResult = null;
+    if (opCount > 0) {
+
+      // Open an HTTP connection to the ElasticSearch back end.
+      String fullUrl = getFullUrl("/_bulk", false);
+      URL url;
+      HttpURLConnection conn;
+      try {
+
+        url = new URL(fullUrl);
+        conn = (HttpURLConnection) url.openConnection();
+        conn.setRequestMethod("PUT");
+        conn.setDoOutput(true);
+        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
+        conn.setRequestProperty("Connection", "Close");
+
+      } catch (IOException e) {
+
+        logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, e.getMessage());
+        if (logger.isDebugEnabled()) {
+          logger.debug(Throwables.getStackTraceAsString(e));
+        }
+
+        throw new DocumentStoreOperationException("Failed to open connection to document store.  Cause: "
+            + e.getMessage(), e);
+      }
+
+      StringBuilder bulkResult = new StringBuilder(128);
+      try {
+        // Create an output stream to write our request to.
+        OutputStreamWriter out = new OutputStreamWriter(conn.getOutputStream());
+
+        if (logger.isDebugEnabled()) {
+          logger.debug("ESController: Sending 'BULK' request to " + conn.getURL());
+          logger.debug("ESController: operations: " + esOperationSet.toString().replaceAll("\n",
+              "\\n"));
+        }
+
+        // Write the assembled bulk request payload to the connection's output stream.
+        out.write(esOperationSet.toString());
+        out.close();
+
+        // Open an input stream on our connection in order to read back the results.
+        InputStream is = conn.getInputStream();
+        InputStreamReader inputstreamreader = new InputStreamReader(is);
+        BufferedReader bufferedreader = new BufferedReader(inputstreamreader);
+
+        // Read the contents of the input stream into our result string...
+        String esResponseString = null;
+
+        while ((esResponseString = bufferedreader.readLine()) != null) {
+          bulkResult.append(esResponseString).append("\n");
+        }
+
+      } catch (IOException e) {
+
+        logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, e.getMessage());
+        if (logger.isDebugEnabled()) {
+          StringWriter sw = new StringWriter();
+          e.printStackTrace(new PrintWriter(sw));
+          logger.debug(sw.toString());
+        }
+
+        throw new DocumentStoreOperationException("Failure interacting with document store.  Cause: "
+            + e.getMessage(), e);
+      }
+
+      if (logger.isDebugEnabled()) {
+        logger.debug("ESController: Received result string from ElasticSearch: = "
+            + bulkResult.toString());
+      }
+
+      // ...and marshal the resulting string into a Java object.
+      try {
+        opResult = marshallEsBulkResult(bulkResult.toString());
+
+      } catch (IOException e) {
+
+        logger.warn(SearchDbMsgs.BULK_OPERATION_FAILURE, e.getMessage());
+        if (logger.isDebugEnabled()) {
+          logger.debug(Throwables.getStackTraceAsString(e));
+        }
+
+        throw new DocumentStoreOperationException("Failed to marshal response body.  Cause: "
+            + e.getMessage(), e);
+      }
+    }
+
+    // Finally, build the operation result and return it to the caller.
+    OperationResult result = new OperationResult();
+    result.setResultCode(207);
+    result.setResult(buildGenericBulkResultSet(opResult, rejected));
+
+    // In the success case we don't want the entire result string to be
+    // dumped into the metrics log, so truncate it.
+    String resultStringForMetricsLog = result.getResult();
+    if ((result.getResultCode() >= 200) && (result.getResultCode() < 300)) {
+      resultStringForMetricsLog = resultStringForMetricsLog.substring(0,
+          Math.min(resultStringForMetricsLog.length(), 85)) + "...";
+    }
+
+    metricsLogger.info(SearchDbMsgs.BULK_OPERATIONS_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, result.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, resultStringForMetricsLog),
+        override);
+
+    return result;
+  }
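+
+  // A minimal usage sketch (hypothetical caller and data): assemble an array
+  // of BulkRequest objects, submit them in a single round trip, e.g.
+  //   OperationResult result = controller.performBulkOperations(requests);
+  // then inspect the per-operation statuses in the 207 multi-status body.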
+
+
+  /**
+   * This method converts a {@link BulkRequest} object into a json structure
+   * which can be understood by ElasticSearch.
+   *
+   * @param request - The request to be performed.
+   * @param sb      - The string builder to append the json data to.
+   * @param fails   - The list to which rejection entries for invalid
+   *                  operations are appended.
+   * @return - true if the operation was appended to the bulk payload,
+   *           false if it was rejected.
+   * @throws DocumentStoreOperationException
+   */
+  private boolean buildEsOperation(BulkRequest request, StringBuilder sb,
+                                   List<ElasticSearchResultItem> fails)
+      throws DocumentStoreOperationException {
+
+    boolean retVal = true;
+
+    // What kind of operation are we performing?
+    switch (request.getOperationType()) {
+
+      // Create a new document.
+      case CREATE:
+
+        // Make sure that we were supplied a document payload.
+        if (request.getOperation().getDocument() == null) {
+
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Missing document payload",
+              request.getIndex(),
+              request.getId(),
+              400,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // Make sure that the supplied document URL is formatted
+        // correctly.
+        if (!ApiUtils.validateDocumentUri(request.getOperation().getMetaData().getUrl(), false)) {
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Invalid document URL: " + request.getOperation().getMetaData().getUrl(),
+              request.getIndex(),
+              "",
+              400,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // Validate that the specified index actually exists before we
+        // try to perform the create.
+        if (!indexExists(ApiUtils.extractIndexFromUri(request.getOperation().getMetaData().getUrl()))) {
+
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Specified resource does not exist: "
+                  + request.getOperation().getMetaData().getUrl(),
+              request.getIndex(),
+              request.getId(),
+              404,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // If we were not supplied an id for the new document, leave
+        // the id parameter off and ElasticSearch will generate one
+        // for us.
+        if (request.getId() == null) {
+
+          sb.append(String.format(BULK_CREATE_WITHOUT_INDEX_TEMPLATE,
+              request.getIndex(),
+              DEFAULT_TYPE));
+
+          // Otherwise, include the supplied id in the bulk operation
+          // sent to ElasticSearch.
+        } else {
+          sb.append(String.format(BULK_CREATE_WITH_INDEX_TEMPLATE,
+              request.getIndex(),
+              DEFAULT_TYPE,
+              request.getId()));
+        }
+
+        try {
+          // Append the document that we want to create.
+          sb.append(request.getOperation().getDocument().toJson()).append("\n");
+        } catch (JsonProcessingException e) {
+          throw new DocumentStoreOperationException("Failure parsing document to json", e);
+        }
+
+        break;
+
+      // Update an existing document.
+      case UPDATE:
+
+        // Make sure that we were supplied a document payload.
+        if (request.getOperation().getDocument() == null) {
+
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Missing document payload",
+              request.getIndex(),
+              request.getId(),
+              400,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // Make sure that the supplied document URL is formatted
+        // correctly.
+        if (!ApiUtils.validateDocumentUri(request.getOperation().getMetaData().getUrl(), true)) {
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Invalid document URL: " + request.getOperation().getMetaData().getUrl(),
+              request.getIndex(),
+              "",
+              400,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // Validate that the specified index actually exists before we
+        // try to perform the update.
+        if (!indexExists(request.getIndex())) {
+
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Specified resource does not exist: "
+                  + request.getOperation().getMetaData().getUrl(),
+              request.getIndex(),
+              request.getId(),
+              404,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // Validate that the document we are trying to update actually
+        // exists before we try to perform the update.
+        if (!documentExists(request.getIndex(), request.getId())) {
+
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Specified resource does not exist: "
+                  + request.getOperation().getMetaData().getUrl(),
+              request.getIndex(),
+              request.getId(),
+              404,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // It is mandatory that a version be supplied for an update operation,
+        // so validate that now.
+        if (request.getOperation().getMetaData().getEtag() == null) {
+
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Missing mandatory ETag field",
+              request.getIndex(),
+              request.getId(),
+              400,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // Generate the update request...
+        sb.append(String.format(BULK_IMPORT_INDEX_TEMPLATE,
+            request.getIndex(),
+            DEFAULT_TYPE,
+            request.getId(),
+            request.getOperation().getMetaData().getEtag()));
+
+        // ...and append the document that we want to update.
+        try {
+          sb.append(request.getOperation().getDocument().toJson()).append("\n");
+        } catch (JsonProcessingException e) {
+          throw new DocumentStoreOperationException("Failure parsing document to json", e);
+        }
+        break;
+
+      // Delete an existing document.
+      case DELETE:
+
+        // Make sure that the supplied document URL is formatted
+        // correctly.
+        if (!ApiUtils.validateDocumentUri(request.getOperation().getMetaData().getUrl(), true)) {
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Invalid document URL: " + request.getOperation().getMetaData().getUrl(),
+              request.getIndex(),
+              "",
+              400,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // Validate that the specified index actually exists before we
+        // try to perform the delete.
+        if (!indexExists(request.getIndex())) {
+
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Specified resource does not exist: "
+                  + request.getOperation().getMetaData().getUrl(),
+              request.getIndex(),
+              request.getId(),
+              404,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // Validate that the document we are trying to delete actually
+        // exists before we try to perform the delete.
+        if (!documentExists(request.getIndex(), request.getId())) {
+
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Specified resource does not exist: "
+                  + request.getOperation().getMetaData().getUrl(),
+              request.getIndex(),
+              request.getId(),
+              404,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // It is mandatory that a version be supplied for a delete operation,
+        // so validate that now.
+        if (request.getOperation().getMetaData().getEtag() == null) {
+
+          fails.add(generateRejectionEntry(request.getOperationType(),
+              "Missing mandatory ETag field",
+              request.getIndex(),
+              request.getId(),
+              400,
+              request.getOperation().getMetaData().getUrl()));
+          return false;
+        }
+
+        // Generate the delete request.
+        sb.append(String.format(BULK_DELETE_TEMPLATE,
+            request.getIndex(),
+            DEFAULT_TYPE,
+            request.getId(),
+            request.getOperation().getMetaData().getEtag()));
+        break;
+      default:
+    }
+
+    return retVal;
+  }
+
+  private boolean indexExists(String index) throws DocumentStoreOperationException {
+
+    OperationResult indexExistsResult = checkIndexExistence(index);
+
+    return ((indexExistsResult.getResultCode() >= 200)
+        && (indexExistsResult.getResultCode() < 300));
+  }
+
+  private boolean documentExists(String index, String id) throws DocumentStoreOperationException {
+
+    OperationResult docExistsResult = checkDocumentExistence(index, id);
+
+    return ((docExistsResult.getResultCode() >= 200) && (docExistsResult.getResultCode() < 300));
+  }
+
+  /**
+   * This method constructs a status entry for a bulk operation which has
+   * been rejected before even sending it to the document store.
+   *
+   * @param opType       - The type of operation that was rejected.
+   * @param rejectReason - A message describing why the operation was rejected.
+   * @param index        - The index the operation was directed at.
+   * @param anId         - The identifier associated with the document being
+   *                       acted on.
+   * @param statusCode   - An HTTP status code.
+   * @param originalUrl  - The document URL from the original request.
+   * @return - A result set item.
+   */
+  private ElasticSearchResultItem generateRejectionEntry(OperationType opType,
+                                                         String rejectReason,
+                                                         String index,
+                                                         String anId,
+                                                         int statusCode,
+                                                         String originalUrl) {
+
+    ElasticSearchError err = new ElasticSearchError();
+    err.setReason(rejectReason);
+
+    ElasticSearchOperationStatus op = new ElasticSearchOperationStatus();
+    op.setIndex(index);
+    op.setId(anId);
+    op.setStatus(statusCode);
+    op.setError(err);
+    op.setAdditionalProperties(ElasticSearchResultItem.REQUEST_URL, originalUrl);
+
+    ElasticSearchResultItem rejectionResult = new ElasticSearchResultItem();
+
+    switch (opType) {
+      case CREATE:
+        rejectionResult.setCreate(op);
+        break;
+      case UPDATE:
+        rejectionResult.setIndex(op);
+        break;
+      case DELETE:
+        rejectionResult.setDelete(op);
+        break;
+      default:
+    }
+
+    return rejectionResult;
+  }
+
+
+  /**
+   * This method takes the json structure returned from ElasticSearch in
+   * response to a bulk operations request and marshals it into a Java
+   * object.
+   *
+   * @param jsonResult - The bulk operations response returned from
+   *                   ElasticSearch.
+   * @return - The marshalled response.
+   * @throws JsonParseException
+   * @throws JsonMappingException
+   * @throws IOException
+   */
+  private ElasticSearchBulkOperationResult marshallEsBulkResult(String jsonResult)
+      throws JsonParseException, JsonMappingException, IOException {
+
+    if (jsonResult != null) {
+      if (logger.isDebugEnabled()) {
+        logger.debug("ESController: Marshalling ES result set from json: "
+            + jsonResult.replaceAll("\n", ""));
+      }
+
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.setSerializationInclusion(Include.NON_EMPTY);
+
+      return mapper.readValue(jsonResult, ElasticSearchBulkOperationResult.class);
+    }
+
+    return null;
+  }
+
+
+  /**
+   * This method takes the marshalled ElasticSearch bulk response and
+   * converts it into a generic response payload.
+   *
+   * @param esResult - ElasticSearch bulk operations response.
+   * @return - A generic result set.
+   */
+  private String buildGenericBulkResultSet(ElasticSearchBulkOperationResult esResult,
+                                           List<ElasticSearchResultItem> rejectedOps) {
+
+    int totalOps = 0;
+    int totalSuccess = 0;
+    int totalFails = 0;
+
+    if (logger.isDebugEnabled()) {
+
+      logger.debug("ESController: Build generic result set.  ES Results: "
+          + ((esResult != null) ? esResult.toString() : "[]")
+          + " Rejected Ops: " + rejectedOps.toString());
+    }
+
+    // Build a combined list of result items from the results returned
+    // from ElasticSearch and the list of operations that we rejected
+    // without sending to ElasticSearch.
+    List<ElasticSearchResultItem> combinedResults = new ArrayList<ElasticSearchResultItem>();
+    if (esResult != null) {
+      combinedResults.addAll(Arrays.asList(esResult.getItems()));
+    }
+    combinedResults.addAll(rejectedOps);
+
+    // Iterate over the individual results in the resulting result set.
+    StringBuilder resultsBuilder = new StringBuilder();
+    AtomicBoolean firstItem = new AtomicBoolean(true);
+    for (ElasticSearchResultItem item : combinedResults) {
+
+      // Increment the operation counts.
+      totalOps++;
+      if (isSuccessCode(item.operationStatus().getStatus())) {
+        totalSuccess++;
+      } else {
+        totalFails++;
+      }
+
+      // Prepend a comma to our response string unless this is the
+      // first result in the set.
+      if (!firstItem.compareAndSet(true, false)) {
+        resultsBuilder.append(", ");
+      }
+
+      // Append the current result as a generic json structure.
+      resultsBuilder.append(item.toJson());
+    }
+
+    // Now, build the result string and return it.
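+    // Illustrative shape of the assembled payload (example values only):
+    //   {"total_operations": 2, "total_success": 1, "total_fails": 1,
+    //    "results": [ { ... }, { ... } ]}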
+    String responseBody = "{ \"total_operations\": " + totalOps + ", "
+        + "\"total_success\": " + totalSuccess + ", "
+        + "\"total_fails\": " + totalFails + ", "
+        + "\"results\": ["
+        + resultsBuilder.toString()
+        + "]}";
+
+    return responseBody;
+  }
+
+
+  /**
+   * This method queries ElasticSearch to determine if the supplied
+   * index is present in the document store.
+   *
+   * @param indexName - The index to look for.
+   * @return - An operation result indicating the success or failure of
+   * the check.
+   * @throws DocumentStoreOperationException
+   */
+  public OperationResult checkIndexExistence(String indexName)
+      throws DocumentStoreOperationException {
+
+    // Initialize the operation result with a failure code / fault string.
+    OperationResult opResult = new OperationResult();
+    opResult.setResultCode(500);
+
+    // Grab the current time so we can use it to generate a metrics log.
+    MdcOverride override = getStartTime(new MdcOverride());
+
+    String fullUrl = getFullUrl("/" + indexName, false);
+    HttpURLConnection conn = initializeConnection(fullUrl);
+
+    try {
+      conn.setRequestMethod("HEAD");
+
+    } catch (ProtocolException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to set HTTP request method to HEAD.", e);
+    }
+
+    logger.debug("Sending 'HEAD' request to: " + conn.getURL());
+
+    int resultCode;
+    try {
+      resultCode = conn.getResponseCode();
+    } catch (IOException e) {
+      shutdownConnection(conn);
+      throw new DocumentStoreOperationException("Failed to get the response code from the connection.", e);
+    }
+    logger.debug("Response Code : " + resultCode);
+
+    opResult.setResultCode(resultCode);
+
+    // Generate a metrics log so we can track how long the operation took.
+    metricsLogger.info(SearchDbMsgs.CHECK_INDEX_TIME,
+        new LogFields()
+            .setField(LogLine.DefinedFields.RESPONSE_CODE, opResult.getResultCode())
+            .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, opResult.getResultCode()),
+        override,
+        indexName);
+
+    shutdownConnection(conn);
+
+    return opResult;
+  }
+
+
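+  // Parses an ElasticSearch document response of (illustratively) the form
+  //   {"_index": "widgets", "_id": "1", "_version": 2, "_source": { ... }}
+  // or, on failure, {"error": {"type": "...", "reason": "..."}, "status": 400}.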
+  private void buildDocumentResult(DocumentOperationResult result, String index)
+      throws DocumentStoreOperationException {
+
+    JSONParser parser = new JSONParser();
+    JSONObject root;
+    try {
+      root = (JSONObject) parser.parse(result.getResult());
+
+      if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
+        // Success response object
+        Document doc = new Document();
+        doc.setEtag(result.getResultVersion());
+        doc.setUrl(buildDocumentResponseUrl(index, root.get("_id").toString()));
+
+        doc.setContent((JSONObject) root.get("_source"));
+        result.setDocument(doc);
+
+      } else {
+        // Error response object
+        JSONObject error = (JSONObject) root.get("error");
+        if (error != null) {
+          result.setError(new ErrorResult(error.get("type").toString(),
+              error.get("reason").toString()));
+        }
+
+      }
+    } catch (Exception e) {
+      throw new DocumentStoreOperationException("Failed to parse Elastic Search response: "
+          + result.getResult(), e);
+    }
+  }
+
+  private String buildDocumentResponseUrl(String index, String id) {
+    return ApiUtils.buildDocumentUri(index, id);
+  }
+
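+  // Parses an ElasticSearch search response of (illustratively) the form
+  //   {"hits": {"total": 3, "hits": [{"_id": "1", "_score": 1.0, "_version": 2,
+  //    "_source": { ... }}]}, "aggregations": { ... }}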
+  private void buildSearchResult(SearchOperationResult result, String index)
+      throws DocumentStoreOperationException {
+
+    JSONParser parser = new JSONParser();
+    JSONObject root;
+
+    try {
+      root = (JSONObject) parser.parse(result.getResult());
+      if (result.getResultCode() >= 200 && result.getResultCode() <= 299) {
+        JSONObject hits = (JSONObject) root.get("hits");
+        JSONArray hitArray = (JSONArray) hits.get("hits");
+        SearchHits searchHits = new SearchHits();
+        searchHits.setTotalHits(hits.get("total").toString());
+        ArrayList<SearchHit> searchHitArray = new ArrayList<SearchHit>();
+
+        for (int i = 0; i < hitArray.size(); i++) {
+          JSONObject hit = (JSONObject) hitArray.get(i);
+          SearchHit searchHit = new SearchHit();
+          searchHit.setScore((hit.get("_score") != null) ? hit.get("_score").toString() : "");
+          Document doc = new Document();
+          if (hit.get("_version") != null) {
+            doc.setEtag((hit.get("_version") != null) ? hit.get("_version").toString() : "");
+          }
+
+          doc.setUrl(buildDocumentResponseUrl(index, (hit.get("_id") != null)
+              ? hit.get("_id").toString() : ""));
+          doc.setContent((JSONObject) hit.get("_source"));
+          searchHit.setDocument(doc);
+          searchHitArray.add(searchHit);
+        }
+        searchHits.setHits(searchHitArray.toArray(new SearchHit[searchHitArray.size()]));
+        result.setSearchResult(searchHits);
+
+        JSONObject aggregations = (JSONObject) root.get("aggregations");
+        if (aggregations != null) {
+          AggregationResult[] aggResults =
+              AggregationParsingUtil.parseAggregationResults(aggregations);
+          AggregationResults aggs = new AggregationResults();
+          aggs.setAggregations(aggResults);
+          result.setAggregationResult(aggs);
+        }
+
+        // success
+      } else {
+        JSONObject error = (JSONObject) root.get("error");
+        if (error != null) {
+          result.setError(new ErrorResult(error.get("type").toString(),
+              error.get("reason").toString()));
+        }
+      }
+    } catch (Exception e) {
+      throw new DocumentStoreOperationException("Failed to parse Elastic Search response: "
+          + result.getResult(), e);
+    }
+
+  }
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchOperationStatus.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchOperationStatus.java
new file mode 100644 (file)
index 0000000..e3f8e6a
--- /dev/null
@@ -0,0 +1,119 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ElasticSearchOperationStatus {
+
+  private String index;
+  private String type;
+  private String id;
+  private String version;
+  private ElasticSearchShardStatus shards;
+  private Integer status;
+  private ElasticSearchError error;
+
+  private Map<String, Object> additionalProperties = new HashMap<String, Object>();
+
+
+  public ElasticSearchError getError() {
+    return error;
+  }
+
+  public void setError(ElasticSearchError error) {
+    this.error = error;
+  }
+
+  public Integer getStatus() {
+    return status;
+  }
+
+  public void setStatus(Integer status) {
+    this.status = status;
+  }
+
+  public ElasticSearchShardStatus getShards() {
+    return shards;
+  }
+
+  public void setShards(ElasticSearchShardStatus shards) {
+    this.shards = shards;
+  }
+
+  public String getIndex() {
+    return index;
+  }
+
+  public void setIndex(String index) {
+    this.index = index;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  public String getVersion() {
+    return version;
+  }
+
+  public void setVersion(String version) {
+    this.version = version;
+  }
+
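+  // Catch-all for any response properties that do not match a declared field
+  // (for example the underscore-prefixed keys ElasticSearch emits, or the
+  // REQUEST_URL marker recorded by the DAO for rejected operations).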
+  @JsonAnyGetter
+  public Map<String, Object> getAdditionalProperties() {
+    return additionalProperties;
+  }
+
+  @JsonAnySetter
+  public void setAdditionalProperties(String name, Object value) {
+    additionalProperties.put(name, value);
+  }
+
+  @Override
+  public String toString() {
+    return "ElasticSearchIndexStatus [index=" + index + ", type=" + type + ", id="
+        + id + ", version=" + version + ", shards=" + shards + ", status=" + status + "]";
+  }
+
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchResultItem.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchResultItem.java
new file mode 100644 (file)
index 0000000..28a235d
--- /dev/null
@@ -0,0 +1,152 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+import org.openecomp.sa.rest.ApiUtils;
+
+public class ElasticSearchResultItem {
+
+  public static final String REQUEST_URL = "REQUEST_URL";
+
+  private ElasticSearchOperationStatus create;
+  private ElasticSearchOperationStatus index;
+  private ElasticSearchOperationStatus delete;
+
+  public ElasticSearchOperationStatus getCreate() {
+    return create;
+  }
+
+  public void setCreate(ElasticSearchOperationStatus create) {
+    this.create = create;
+  }
+
+  public ElasticSearchOperationStatus getIndex() {
+    return index;
+  }
+
+  public void setIndex(ElasticSearchOperationStatus index) {
+    this.index = index;
+  }
+
+  public ElasticSearchOperationStatus getDelete() {
+    return delete;
+  }
+
+  public void setDelete(ElasticSearchOperationStatus delete) {
+    this.delete = delete;
+  }
+
+  public String operationType() {
+
+    if (create != null) {
+      return "create";
+    }
+    if (index != null) {
+      return "update";
+    }
+    if (delete != null) {
+      return "delete";
+    }
+
+    return "unknown";
+  }
+
+  public ElasticSearchOperationStatus operationStatus() {
+
+    if (create != null) {
+      return create;
+    }
+    if (index != null) {
+      return index;
+    }
+    if (delete != null) {
+      return delete;
+    }
+
+    return null;
+  }
+
+
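+  // Renders this item as one entry in the generic bulk response. Illustrative
+  // output (example values; the url is whatever was recorded under REQUEST_URL
+  // or built from the index and document id):
+  //   {"operation": "create", "url": "<document-url>", "etag": "1",
+  //    "status-code": "201", "status-message": "OK"}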
+  public String toJson() {
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+
+    sb.append("\"operation\": \"").append(operationType()).append("\", ");
+
+    if (operationStatus().getAdditionalProperties().containsKey(REQUEST_URL)) {
+      sb.append("\"url\": \"").append(operationStatus().getAdditionalProperties()
+          .get(REQUEST_URL)).append("\", ");
+    } else {
+      sb.append("\"url\": \"").append(ApiUtils.buildDocumentUri(operationStatus()
+          .getIndex(), operationStatus().getId())).append("\", ");
+    }
+
+    // We don't want to include an etag field in the response in
+    // the case of a delete, since that would imply that the client
+    // could still access that version of the file in some manner
+    // (which we are not supporting).
+    if (!operationType().equals("delete")) {
+      sb.append("\"etag\": \"").append(operationStatus().getVersion()).append("\", ");
+    }
+    sb.append("\"status-code\": \"").append(operationStatus().getStatus()).append("\", ");
+
+    sb.append("\"status-message\": \"");
+
+    if ((operationStatus().getStatus() >= 200) && (operationStatus().getStatus() < 300)) {
+      sb.append("OK");
+    } else {
+      // Sometimes the error object doesn't get populated, so check
+      // before we try to reference it...
+      if (operationStatus().getError() != null) {
+        sb.append(operationStatus().getError().getReason());
+      }
+    }
+    sb.append("\"");
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("ElasticSearchItemStatus [");
+    if (create != null) {
+      sb.append("create " + create);
+    } else if (index != null) {
+      sb.append("index " + index);
+    } else if (delete != null) {
+      sb.append("delete " + index);
+    }
+    sb.append("]");
+    return sb.toString();
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchShardStatus.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchShardStatus.java
new file mode 100644 (file)
index 0000000..77dc2ad
--- /dev/null
@@ -0,0 +1,64 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+public class ElasticSearchShardStatus {
+
+  private int total;
+  private int successful;
+  private int failed;
+
+  public int getTotal() {
+    return total;
+  }
+
+  public void setTotal(int total) {
+    this.total = total;
+  }
+
+  public int getSuccessful() {
+    return successful;
+  }
+
+  public void setSuccessful(int successful) {
+    this.successful = successful;
+  }
+
+  public int getFailed() {
+    return failed;
+  }
+
+  public void setFailed(int failed) {
+    this.failed = failed;
+  }
+
+  @Override
+  public String toString() {
+    return "ElasticSearchShardStatus [total=" + total + ", successful=" + successful
+        + ", failed=" + failed + "]";
+  }
+
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/exception/DocumentStoreOperationException.java b/src/main/java/org/openecomp/sa/searchdbabstraction/elasticsearch/exception/DocumentStoreOperationException.java
new file mode 100644 (file)
index 0000000..4425754
--- /dev/null
@@ -0,0 +1,39 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.exception;
+
+public class DocumentStoreOperationException extends Exception {
+
+  private static final long serialVersionUID = -7689309913743200670L;
+
+  public DocumentStoreOperationException(String message, Exception ex) {
+    super(message, ex);
+  }
+
+  public DocumentStoreOperationException(String message) {
+    super(message);
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationBucket.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationBucket.java
new file mode 100644 (file)
index 0000000..4877b0b
--- /dev/null
@@ -0,0 +1,79 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Arrays;
+
+public class AggregationBucket {
+  private Object key;
+
+  @JsonProperty("formatted-key")
+  private String formattedKey;
+
+  private Number count;
+
+  @JsonProperty("sub-aggregations")
+  private AggregationResult[] subAggregationResult;
+
+  public Object getKey() {
+    return key;
+  }
+
+  public void setKey(Object key) {
+    this.key = key;
+  }
+
+  public String getFormattedKey() {
+    return formattedKey;
+  }
+
+  public void setFormattedKey(String formattedKey) {
+    this.formattedKey = formattedKey;
+  }
+
+  public Number getCount() {
+    return count;
+  }
+
+  public void setCount(Number count) {
+    this.count = count;
+  }
+
+  public AggregationResult[] getSubAggregationResult() {
+    return subAggregationResult;
+  }
+
+  public void setSubAggregationResult(AggregationResult[] subAggregationResult) {
+    this.subAggregationResult = subAggregationResult;
+  }
+
+  @Override
+  public String toString() {
+    return "AggregationBucket [key=" + key + ", formattedKey=" + formattedKey + ", count=" + count
+        + ", subAggregationResult=" + Arrays.toString(subAggregationResult) + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResult.java
new file mode 100644 (file)
index 0000000..3867975
--- /dev/null
@@ -0,0 +1,79 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Arrays;
+
+public class AggregationResult {
+  private String name;
+
+  private Number count;
+
+  private AggregationBucket[] buckets;
+
+  @JsonProperty("nested-aggregations")
+  private AggregationResult[] nestedAggregations;
+
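+  // Illustrative serialized form (using the JSON property names mapped above):
+  //   {"name": "group-by-type", "count": 12,
+  //    "buckets": [ ... ], "nested-aggregations": [ ... ]}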
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public AggregationBucket[] getBuckets() {
+    return buckets;
+  }
+
+  public void setBuckets(AggregationBucket[] buckets) {
+    this.buckets = buckets;
+  }
+
+  public AggregationResult[] getNestedAggregations() {
+    return nestedAggregations;
+  }
+
+  public void setNestedAggregations(AggregationResult[] nestedAggregations) {
+    this.nestedAggregations = nestedAggregations;
+  }
+
+  public Number getCount() {
+    return count;
+  }
+
+  public void setCount(Number count) {
+    this.count = count;
+  }
+
+  @Override
+  public String toString() {
+    return "AggregationResult [name=" + name + ", count=" + count + ", buckets="
+        + Arrays.toString(buckets) + ", nestedAggregations=" + Arrays.toString(nestedAggregations)
+        + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResults.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/AggregationResults.java
new file mode 100644 (file)
index 0000000..f2f7f8f
--- /dev/null
@@ -0,0 +1,45 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+import java.util.Arrays;
+
+public class AggregationResults {
+  private AggregationResult[] aggregations;
+
+  public AggregationResult[] getAggregations() {
+    return aggregations;
+  }
+
+  public void setAggregations(AggregationResult[] aggregations) {
+    this.aggregations = aggregations;
+  }
+
+  @Override
+  public String toString() {
+    return "AggregationResults [aggregations=" + Arrays.toString(aggregations) + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/Document.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/Document.java
new file mode 100644 (file)
index 0000000..53e83dd
--- /dev/null
@@ -0,0 +1,65 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+import org.json.simple.JSONObject;
+
+public class Document {
+  private String etag;
+  private String url;
+
+  private JSONObject content;
+
+  public String getEtag() {
+    return etag;
+  }
+
+  public void setEtag(String etag) {
+    this.etag = etag;
+  }
+
+  public String getUrl() {
+    return url;
+  }
+
+  public void setUrl(String url) {
+    this.url = url;
+  }
+
+  public JSONObject getContent() {
+    return content;
+  }
+
+  public void setContent(JSONObject content) {
+    this.content = content;
+  }
+
+  @Override
+  public String toString() {
+    return "Document [etag=" + etag + ", url=" + url + "]";
+  }
+
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/DocumentOperationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/DocumentOperationResult.java
new file mode 100644 (file)
index 0000000..2af5e9e
--- /dev/null
@@ -0,0 +1,44 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+public class DocumentOperationResult extends OperationResult {
+  private Document document;
+
+  public Document getDocument() {
+    return document;
+  }
+
+  public void setDocument(Document document) {
+    this.document = document;
+  }
+
+  @Override
+  public String toString() {
+    return "DocumentOperationResult [document=" + document + "]";
+  }
+
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/ErrorResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/ErrorResult.java
new file mode 100644 (file)
index 0000000..7171c66
--- /dev/null
@@ -0,0 +1,60 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+public class ErrorResult {
+
+  private String type;
+  private String reason;
+
+
+  public ErrorResult(String type, String reason) {
+    this.type = type;
+    this.reason = reason;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public String getReason() {
+    return reason;
+  }
+
+  public void setReason(String reason) {
+    this.reason = reason;
+  }
+
+  @Override
+  public String toString() {
+    return "ErrorResponse [type=" + type + ", reason=" + reason + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/OperationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/OperationResult.java
new file mode 100644 (file)
index 0000000..b496fb4
--- /dev/null
@@ -0,0 +1,82 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+public class OperationResult {
+
+  private int resultCode;
+
+  private String failureCause;
+  private String resultVersion;
+  private String result;
+  private ErrorResult error;
+
+  public int getResultCode() {
+    return resultCode;
+  }
+
+  public void setResultCode(int resultCode) {
+    this.resultCode = resultCode;
+  }
+
+  public String getFailureCause() {
+    return failureCause;
+  }
+
+  public void setFailureCause(String failureCause) {
+    this.failureCause = failureCause;
+  }
+
+  public String getResultVersion() {
+    return resultVersion;
+  }
+
+  public void setResultVersion(String resultVersion) {
+    this.resultVersion = resultVersion;
+  }
+
+  public String getResult() {
+    return result;
+  }
+
+  public void setResult(String result) {
+    this.result = result;
+  }
+
+  public ErrorResult getError() {
+    return error;
+  }
+
+  public void setError(ErrorResult error) {
+    this.error = error;
+  }
+
+  @Override
+  public String toString() {
+    return "OperationResult [resultCode=" + resultCode + ", failureCause=" + failureCause
+        + ", resultVersion=" + resultVersion + ", result=" + result + ", error=" + error + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHit.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHit.java
new file mode 100644 (file)
index 0000000..b4077e9
--- /dev/null
@@ -0,0 +1,52 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+public class SearchHit {
+  private String score;
+  Document document;
+
+  public String getScore() {
+    return score;
+  }
+
+  public void setScore(String score) {
+    this.score = score;
+  }
+
+  public Document getDocument() {
+    return document;
+  }
+
+  public void setDocument(Document document) {
+    this.document = document;
+  }
+
+  @Override
+  public String toString() {
+    return "SearchHit [score=" + score + ", document=" + document + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHits.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchHits.java
new file mode 100644 (file)
index 0000000..bbaf2f8
--- /dev/null
@@ -0,0 +1,54 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+import java.util.Arrays;
+
+public class SearchHits {
+  private String totalHits;
+  private SearchHit[] hits;
+
+  public String getTotalHits() {
+    return totalHits;
+  }
+
+  public void setTotalHits(String totalHits) {
+    this.totalHits = totalHits;
+  }
+
+  public SearchHit[] getHits() {
+    return hits;
+  }
+
+  public void setHits(SearchHit[] hits) {
+    this.hits = hits;
+  }
+
+  @Override
+  public String toString() {
+    return "SearchHits [totalHits=" + totalHits + ", hits=" + Arrays.toString(hits) + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchOperationResult.java b/src/main/java/org/openecomp/sa/searchdbabstraction/entity/SearchOperationResult.java
new file mode 100644 (file)
index 0000000..be00d77
--- /dev/null
@@ -0,0 +1,54 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.entity;
+
+public class SearchOperationResult extends OperationResult {
+
+  private SearchHits searchResult;
+  private AggregationResults aggregationResult;
+
+  public SearchHits getSearchResult() {
+    return searchResult;
+  }
+
+  public AggregationResults getAggregationResult() {
+    return aggregationResult;
+  }
+
+  public void setAggregationResult(AggregationResults aggregations) {
+    this.aggregationResult = aggregations;
+  }
+
+  public void setSearchResult(SearchHits hits) {
+    this.searchResult = hits;
+  }
+
+  @Override
+  public String toString() {
+    return "SearchOperationResult [searchResult=" + searchResult
+        + ", aggregationResult=" + aggregationResult;
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/logging/SearchDbMsgs.java b/src/main/java/org/openecomp/sa/searchdbabstraction/logging/SearchDbMsgs.java
new file mode 100644 (file)
index 0000000..0b415d2
--- /dev/null
@@ -0,0 +1,226 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.logging;
+
+import com.att.eelf.i18n.EELFResourceManager;
+import org.openecomp.cl.eelf.LogMessageEnum;
+
+public enum SearchDbMsgs implements LogMessageEnum {
+
+  /**
+   * Arguments:
+   * None
+   */
+  SERVICE_STARTED,
+
+  /**
+   * Arguments:
+   * {0} = url
+   */
+  ELASTIC_SEARCH_CONNECTION_ATTEMPT,
+
+  /**
+   * Arguments:
+   * {0} = url
+   */
+  ELASTIC_SEARCH_CONNECTION_SUCCESS,
+
+  /**
+   * Arguments:
+   * {0} = url
+   * {1} = failure cause
+   */
+  ELASTIC_SEARCH_CONNECTION_FAILURE,
+
+  /**
+   * Arguments:
+   * {0} = Filter configuration file.
+   * {1} = Failure cause.
+   */
+  FILTERS_CONFIG_FAILURE,
+
+  /**
+   * Arguments:
+   * {0} = Analysis configuration file.
+   * {1} = Failure cause.
+   */
+  ANALYSYS_CONFIG_FAILURE,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   */
+  CREATED_INDEX,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   * {1} = Document type
+   */
+  CREATE_INDEX_TIME,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   */
+  DELETED_INDEX,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   */
+  DELETE_INDEX_TIME,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   */
+  CHECK_INDEX_TIME,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   */
+  CREATE_DOCUMENT_TIME,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   * {1} = Document id
+   */
+  UPDATE_DOCUMENT_TIME,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   * {1} = Document id
+   */
+  DELETE_DOCUMENT_TIME,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   * {1} = Document id
+   */
+  GET_DOCUMENT_TIME,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   * {1} = Query string
+   */
+  QUERY_DOCUMENT_TIME,
+
+  /**
+   * Arguments:
+   */
+  BULK_OPERATIONS_TIME,
+
+  /**
+   * Arguments:
+   */
+  PROCESSED_BULK_OPERATIONS,
+
+  /**
+   * Arguments:
+   * {0} = Event
+   * {1} = Result
+   */
+  PROCESS_EVENT,
+
+  /**
+   * Arguments:
+   * {0} = URL.
+   */
+  PROCESS_INLINE_QUERY,
+
+  /**
+   * Arguments:
+   * {0} = Operation type (GET or POST)
+   * {1} = URL.
+   */
+  PROCESS_PAYLOAD_QUERY,
+
+  /**
+   * Arguments:
+   * {0} = Index
+   * {1} = Error
+   */
+  INDEX_CREATE_FAILURE,
+
+  /**
+   * Arguments:
+   * {0} = Index name
+   * {1} = Error cause
+   */
+  INDEX_DELETE_FAILURE,
+
+  /**
+   * Arguments:
+   * {0} = Failure cause.
+   */
+  GET_ANALYZERS_FAILURE,
+
+  /**
+   * Arguments:
+   * {0} = Failure cause.
+   */
+  BULK_OPERATION_FAILURE,
+
+  /**
+   * Arguments:
+   * {0} = Method
+   * {1} = Exception
+   */
+  EXCEPTION_DURING_METHOD_CALL,
+
+  /**
+   * Received request {0} {1} from {2}.  Sending response: {3}
+   *
+   * <p>Arguments:
+   * {0} = operation
+   * {1} = target URL
+   * {2} = source
+   * {3} = response code
+   */
+  PROCESS_REST_REQUEST,
+
+  /**
+   * Exception encountered during startup of search service: {0}
+   *
+   * <p>Arguments:
+   * {0} = exception
+   */
+  STARTUP_EXCEPTION;
+
+  /**
+   * Load message bundle (SearchDbMsgs.properties file)
+   */
+  static {
+    EELFResourceManager.loadMessageBundle("logging/SearchDbMsgs");
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AbstractAggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AbstractAggregation.java
new file mode 100644 (file)
index 0000000..6359227
--- /dev/null
@@ -0,0 +1,82 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+
+/**
+ * This is the common parent from which all aggregation types inherit.  It defines
+ * the common fields that all aggregations must include.
+ */
+public abstract class AbstractAggregation {
+
+  /**
+   * The name of the field to apply the aggregation against.
+   */
+  protected String field;
+
+  /**
+   * Optionally allows the number of buckets for the aggregation to be
+   * specified.
+   */
+  protected Integer size;
+
+  /**
+   * Optionally sets the minimum number of matches that must occur before
+   * a particular bucket is included in the aggregation result.
+   */
+  @JsonProperty("min-threshold")
+  protected Integer minThreshold;
+
+
+  public String getField() {
+    return field;
+  }
+
+  public void setField(String field) {
+    this.field = field;
+  }
+
+  public Integer getSize() {
+    return size;
+  }
+
+  public void setSize(Integer size) {
+    this.size = size;
+  }
+
+  public Integer getMinThreshold() {
+    return minThreshold;
+  }
+
+  public void setMinThreshold(Integer minThreshold) {
+    this.minThreshold = minThreshold;
+  }
+
+  public abstract String toElasticSearch();
+
+  public abstract String toString();
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Aggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Aggregation.java
new file mode 100644 (file)
index 0000000..2cb42c6
--- /dev/null
@@ -0,0 +1,67 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class Aggregation {
+  private String name;
+
+  @JsonProperty("aggregation")
+  private AggregationStatement aggregation;
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public AggregationStatement getStatement() {
+    return aggregation;
+  }
+
+  public void setStatement(AggregationStatement aggregation) {
+    this.aggregation = aggregation;
+  }
+
+  public String toElasticSearch() {
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("\"");
+    sb.append(name);
+    sb.append("\": ");
+    sb.append(aggregation.toElasticSearch());
+
+    return sb.toString();
+  }
+
+  @Override
+  public String toString() {
+    return "{name: " + name + ", aggregation: " + aggregation.toString();
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatement.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatement.java
new file mode 100644 (file)
index 0000000..cf2314e
--- /dev/null
@@ -0,0 +1,178 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.Arrays;
+
+public class AggregationStatement {
+
+  @JsonProperty("group-by")
+  private GroupByAggregation groupBy;
+
+  @JsonProperty("date-range")
+  private DateRangeAggregation dateRange;
+
+  @JsonProperty("date-histogram")
+  private DateHistogramAggregation dateHist;
+
+  @JsonProperty("nested")
+  private Aggregation[] nested;
+
+  @JsonProperty("sub-aggregations")
+  private Aggregation[] subAggregations;
+
+  public GroupByAggregation getGroupBy() {
+    return groupBy;
+  }
+
+  public void setGroupBy(GroupByAggregation groupBy) {
+    this.groupBy = groupBy;
+  }
+
+  public DateRangeAggregation getDateRange() {
+    return dateRange;
+  }
+
+  public void setDateRange(DateRangeAggregation dateRange) {
+    this.dateRange = dateRange;
+  }
+
+  public DateHistogramAggregation getDateHist() {
+    return dateHist;
+  }
+
+  public void setDateHist(DateHistogramAggregation dateHist) {
+    this.dateHist = dateHist;
+  }
+
+  public Aggregation[] getNested() {
+    return nested;
+  }
+
+  public void setNested(Aggregation[] nested) {
+    this.nested = nested;
+  }
+
+  public Aggregation[] getSubAggregations() {
+    return subAggregations;
+  }
+
+  public void setSubAggregations(Aggregation[] subAggregations) {
+    this.subAggregations = subAggregations;
+  }
+
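+  // Illustrative output for a nested aggregation (assuming the first nested
+  // aggregation targets a dotted field such as "address.city"):
+  //   {"nested": {"path": "address"}, "aggs": { ...rendered aggregations... }}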
+  public String toElasticSearch() {
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+
+    if (nested != null && nested.length > 0) {
+      sb.append("\"nested\": {\"path\": \"");
+      if (nested[0].getStatement() != null) {
+        sb.append(nested[0].getStatement().getNestedPath());
+      }
+      sb.append("\"}, \"aggs\": {");
+      for (int i = 0; i < nested.length; i++) {
+        if (i > 0) {
+          sb.append(",");
+        }
+        sb.append(nested[i].toElasticSearch());
+      }
+
+      sb.append("}");
+    } else {
+      if (groupBy != null) {
+        sb.append(groupBy.toElasticSearch());
+      } else if (dateRange != null) {
+        sb.append(dateRange.toElasticSearch());
+      } else if (dateHist != null) {
+        sb.append(dateHist.toElasticSearch());
+      }
+
+      if (subAggregations != null && subAggregations.length > 0) {
+        sb.append(", \"aggs\": {");
+        for (int i = 0; i < subAggregations.length; i++) {
+          if (i > 0) {
+            sb.append(",");
+          }
+          sb.append(subAggregations[i].toElasticSearch());
+        }
+        sb.append("}");
+      }
+    }
+
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+
+    if (nested != null) {
+      sb.append("{nested: ");
+      sb.append(Arrays.toString(nested));
+    } else if (groupBy != null) {
+      sb.append(groupBy.toString());
+    } else if (dateHist != null) {
+      sb.append(dateHist.toString());
+    } else if (dateRange != null) {
+      sb.append(dateRange.toString());
+    }
+
+    if (subAggregations != null) {
+      sb.append(", sub-aggregations: ");
+      sb.append(Arrays.toString(subAggregations));
+    }
+
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+  public String getNestedPath() {
+    String path = null;
+    String fieldName = null;
+
+    if (groupBy != null) {
+      fieldName = groupBy.getField();
+    } else if (dateRange != null) {
+      fieldName = dateRange.getField();
+    } else if (dateHist != null) {
+      fieldName = dateHist.getField();
+    }
+
+    if (fieldName != null && fieldName.contains(".")) {
+      // we have nested field
+      path = fieldName.substring(0, fieldName.indexOf("."));
+    }
+
+    return path;
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregation.java
new file mode 100644 (file)
index 0000000..bc62a95
--- /dev/null
@@ -0,0 +1,119 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * An example of elasticsearch date_histogram aggregation:
+ *
+ * <p><pre>
+ * {
+ *    "aggs": {
+ *        "my_group": {
+ *            "date_histogram" : {
+ *               "field" : "date",
+ *               "interval" : "month"
+ *           }
+ *        }
+ *    }
+ * }
+ * </pre>
+ */
+
+public class DateHistogramAggregation extends AbstractAggregation {
+
+  private String interval;
+
+  private String format;
+
+  @JsonProperty("time-zone")
+  private String timeZone;
+
+
+  public String getInterval() {
+    return interval;
+  }
+
+  public void setInterval(String interval) {
+    this.interval = interval;
+  }
+
+  public String getTimeZone() {
+    return timeZone;
+  }
+
+  public String getFormat() {
+    return format;
+  }
+
+  public void setFormat(String format) {
+    this.format = format;
+  }
+
+  public void setTimeZone(String timeZone) {
+    this.timeZone = timeZone;
+  }
+
+  @Override
+  public String toElasticSearch() {
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("\"date_histogram\": {\"field\": \"");
+    sb.append(field);
+    sb.append("\"");
+    if (interval != null) {
+      sb.append(", \"interval\": \"");
+      sb.append(interval);
+      sb.append("\"");
+    }
+    if (format != null) {
+      sb.append(", \"format\": \"");
+      sb.append(format);
+      sb.append("\"");
+    }
+    if (timeZone != null) {
+      sb.append(", \"time_zone\": \"");
+      sb.append(timeZone);
+      sb.append("\"");
+    }
+    if (size != null) {
+      sb.append(", \"size\": ");
+      sb.append(size);
+    }
+    if (minThreshold != null) {
+      sb.append(", \"min_doc_count\": ").append(minThreshold);
+    }
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+  @Override
+  public String toString() {
+    return "DateHistogramAggregation: [field=" + field + ", interval=" + interval + ", format="
+        + format + ", timeZone=" + timeZone + ", size=" + size + " minThreshold=" + minThreshold;
+  }
+}
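A quick sketch of the intended round trip for the class above (illustrative only; it assumes the inherited `field` member, declared in AbstractAggregation elsewhere in this commit, binds to the "field" JSON property via Jackson, and the wrapper class name is invented):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.openecomp.sa.searchdbabstraction.searchapi.DateHistogramAggregation;

    public class DateHistogramSketch {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        DateHistogramAggregation agg = mapper.readValue(
            "{\"field\": \"date\", \"interval\": \"month\", \"time-zone\": \"-02:00\"}",
            DateHistogramAggregation.class);
        // Expected fragment:
        // "date_histogram": {"field": "date", "interval": "month", "time_zone": "-02:00"}
        System.out.println(agg.toElasticSearch());
      }
    }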
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRange.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRange.java
new file mode 100644 (file)
index 0000000..1bd0240
--- /dev/null
@@ -0,0 +1,115 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * This class represents a single range specification in a date_range statement.
+ * <p>
+ * The expected JSON structure for a range is as follows:
+ * <p>
+ * <pre>
+ * {
+ *  "from": <from-date>
+ * }
+ * </pre>
+ * <p>
+ * or
+ * <p>
+ * <pre>
+ * {
+ *  "to": <to-date>
+ * }
+ * </pre>
+ * <p>
+ * or
+ * <p>
+ * <pre>
+ * {
+ *  "from": <from-date>,
+ *  "to": <to-date>
+ * }
+ * </pre>
+ *
+ * @author sye
+ */
+public class DateRange {
+
+  @JsonProperty("from")
+  private String fromDate;
+
+  @JsonProperty("to")
+  private String toDate;
+
+  public String getFromDate() {
+    return fromDate;
+  }
+
+  public void setFromDate(String fromDate) {
+    this.fromDate = fromDate;
+  }
+
+  public String getToDate() {
+    return toDate;
+  }
+
+  public void setToDate(String toDate) {
+    this.toDate = toDate;
+  }
+
+  public String toElasticSearch() {
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+
+    if (fromDate != null) {
+      sb.append("\"from\": \"");
+      sb.append(fromDate);
+      sb.append("\"");
+    }
+
+    if (toDate != null) {
+      if (fromDate != null) {
+        sb.append(", ");
+      }
+      sb.append("\"to\": \"");
+      sb.append(toDate);
+      sb.append("\"");
+    }
+
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+  @Override
+  public String toString() {
+    return "{from: " + fromDate + ", to: " + toDate + "}";
+  }
+
+}
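A minimal sanity check of the serializer above, using only the setters shown (a sketch; the wrapper class name is invented):

    import org.openecomp.sa.searchdbabstraction.searchapi.DateRange;

    public class DateRangeSketch {
      public static void main(String[] args) {
        DateRange range = new DateRange();
        range.setFromDate("now-10M/M");
        System.out.println(range.toElasticSearch()); // {"from": "now-10M/M"}
        range.setToDate("now");
        System.out.println(range.toElasticSearch()); // {"from": "now-10M/M", "to": "now"}
      }
    }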
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregation.java
new file mode 100644 (file)
index 0000000..f938e68
--- /dev/null
@@ -0,0 +1,133 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * An example of a date_range aggregation:
+ *
+ * <p><pre>
+ * {
+ *    "aggs": {
+ *        "range": {
+ *            "date_range": {
+ *                "field": "date",
+ *                "format": "MM-yyy",
+ *                "ranges": [
+ *                    { "to": "now-10M/M" },
+ *                    { "from": "now-10M/M" }
+ *                ]
+ *            }
+ *        }
+ *    }
+ * }
+ * </pre>
+ *
+ * @author sye
+ */
+public class DateRangeAggregation extends AbstractAggregation {
+
+
+  private String format;
+
+  @JsonProperty("ranges")
+  private DateRange[] dateRanges;
+
+
+  public String getFormat() {
+    return format;
+  }
+
+  public void setFormat(String format) {
+    this.format = format;
+  }
+
+  public DateRange[] getDateRanges() {
+    return dateRanges;
+  }
+
+  public void setDateRanges(DateRange[] dateRanges) {
+    this.dateRanges = dateRanges;
+  }
+
+  @Override
+  public String toElasticSearch() {
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("\"date_range\": {\"field\": \"");
+    sb.append(field);
+    sb.append("\"");
+
+    if (format != null) {
+      sb.append(", \"format\": \"");
+      sb.append(format);
+      sb.append("\"");
+    }
+
+    if (dateRanges != null && dateRanges.length > 0) {
+      sb.append(", \"ranges\": [");
+
+      for (int i = 0; i < dateRanges.length; i++) {
+        if (i > 0) {
+          sb.append(",");
+        }
+        sb.append(dateRanges[i].toElasticSearch());
+      }
+
+      sb.append("]");
+    }
+
+    if (size != null) {
+      sb.append(", \"size\": ");
+      sb.append(size);
+    }
+
+    if (minThreshold != null) {
+      sb.append(", \"min_doc_count\": ").append(minThreshold);
+    }
+
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("date-range: {field: " + field + ", format: " + format + ", size: " + size
+        + ", minThreshold: " + minThreshold + ", ranges: [");
+    if (dateRanges != null) {
+      for (int i = 0; i < dateRanges.length; i++) {
+        if (i > 0) {
+          sb.append(",");
+        }
+        sb.append(dateRanges[i].toString());
+      }
+    }
+    sb.append("]}");
+
+    return sb.toString();
+  }
+
+}
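The same pattern applies to the aggregation as a whole. A sketch, again assuming the inherited `field` member binds to the "field" JSON property (the wrapper class name is invented):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.openecomp.sa.searchdbabstraction.searchapi.DateRangeAggregation;

    public class DateRangeAggSketch {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        DateRangeAggregation agg = mapper.readValue(
            "{\"field\": \"date\", \"format\": \"MM-yyy\","
                + " \"ranges\": [{\"to\": \"now-10M/M\"}, {\"from\": \"now-10M/M\"}]}",
            DateRangeAggregation.class);
        // Mirrors the javadoc example: "date_range": {"field": "date", "format": "MM-yyy", ...}
        System.out.println(agg.toElasticSearch());
      }
    }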
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Filter.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Filter.java
new file mode 100644 (file)
index 0000000..13519ae
--- /dev/null
@@ -0,0 +1,190 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * This class represents the filter stanza in a search statement.
+ *
+ * <p>The expected JSON structure for a filter stanza is as follows:
+ * <pre>
+ * {
+ *     "filter": {
+ *        "all": [ {query structure}, {query structure}, ... {query structure} ],
+ *        "any": [ {query structure}, {query structure}, ... {query structure} ]
+ *     }
+ * }
+ * </pre>
+ */
+public class Filter {
+
+  /**
+   * All queries in this list must evaluate to true for the filter to pass.
+   */
+  private QueryStatement[] all;
+
+  /**
+   * Any one of the queries in this list must evaluate to true for the
+   * filter to pass.
+   */
+  private QueryStatement[] any;
+
+
+  public QueryStatement[] getAll() {
+    return all;
+  }
+
+  public void setAll(QueryStatement[] all) {
+    this.all = all;
+  }
+
+  public QueryStatement[] getAny() {
+    return any;
+  }
+
+  public void setAny(QueryStatement[] any) {
+    this.any = any;
+  }
+
+  /**
+   * This method returns a string which represents this filter in syntax
+   * that is understandable by ElasticSearch and is suitable for inclusion
+   * in an ElasticSearch query string.
+   *
+   * @return - ElasticSearch syntax string.
+   */
+  public String toElasticSearch() {
+
+    StringBuilder sb = new StringBuilder();
+
+    List<QueryStatement> notMatchQueries = new ArrayList<QueryStatement>();
+    sb.append("{");
+    sb.append("\"bool\": {");
+
+    // Add the queries from our 'all' list.
+    int matchQueriesCount = 0;
+    int notMatchQueriesCount = 0;
+    if (all != null) {
+      sb.append("\"must\": [");
+
+      for (QueryStatement query : all) {
+        if (query.isNotMatch()) {
+          notMatchQueries.add(query);
+        } else {
+          if (matchQueriesCount > 0) {
+            sb.append(", ");
+          }
+          sb.append(query.toElasticSearch());
+          matchQueriesCount++;
+        }
+      }
+      sb.append("],");
+
+
+      sb.append("\"must_not\": [");
+      for (QueryStatement query : notMatchQueries) {
+        if (notMatchQueriesCount > 0) {
+          sb.append(", ");
+        }
+        sb.append(query.toElasticSearch());
+        notMatchQueriesCount++;
+      }
+      sb.append("]");
+    }
+
+    // Add the queries from our 'any' list.
+    notMatchQueries.clear();
+    if (any != null) {
+      if (all != null) {
+        sb.append(",");
+      }
+      sb.append("\"should\": [");
+
+      matchQueriesCount = 0;
+      for (QueryStatement query : any) {
+        if (query.isNotMatch()) {
+          notMatchQueries.add(query);
+        } else {
+          if (matchQueriesCount > 0) {
+            sb.append(", ");
+          }
+          sb.append(query.toElasticSearch());
+          matchQueriesCount++;
+        }
+      }
+      sb.append("],");
+
+      notMatchQueriesCount = 0;
+      sb.append("\"must_not\": [");
+      for (QueryStatement query : notMatchQueries) {
+        if (notMatchQueriesCount > 0) {
+          sb.append(", ");
+        }
+        sb.append(query.toElasticSearch());
+        notMatchQueriesCount++;
+      }
+      sb.append("]");
+    }
+    sb.append("}");
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+  @Override
+  public String toString() {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+
+    sb.append("all: [");
+    if (all != null) {
+      for (QueryStatement query : all) {
+        sb.append(query.toString());
+      }
+    }
+    sb.append("], ");
+
+    sb.append("any: [");
+    if (any != null) {
+      for (QueryStatement query : any) {
+        sb.append(query.toString());
+      }
+    }
+    sb.append("] ");
+
+    sb.append("}");
+
+    return sb.toString();
+  }
+}
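To illustrate how the 'all' list is split into must/must_not clauses, a hypothetical sketch (field names and values invented; the JSON property names follow the annotations on QueryStatement and TermQuery later in this commit):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.openecomp.sa.searchdbabstraction.searchapi.Filter;

    public class FilterSketch {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Filter filter = mapper.readValue(
            "{\"all\": [{\"match\": {\"field\": \"searchTags\", \"value\": \"abcd\"}},"
                + " {\"not-match\": {\"field\": \"searchTags\", \"value\": \"efgh\"}}]}",
            Filter.class);
        // Produces: {"bool": {"must": [<match>], "must_not": [<not-match>]}}
        System.out.println(filter.toElasticSearch());
      }
    }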
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregation.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregation.java
new file mode 100644 (file)
index 0000000..3225a93
--- /dev/null
@@ -0,0 +1,73 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+/**
+ * An example of a group-by (terms) aggregation:
+ *
+ * <p><pre>
+ * {
+ *    "aggs": {
+ *        "my_group": {
+ *            "term": {
+ *                "field": "group"
+ *            }
+ *        }
+ *    }
+ * }
+ * </pre>
+ *
+ * @author sye
+ */
+public class GroupByAggregation extends AbstractAggregation {
+
+  @Override
+  public String toElasticSearch() {
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("\"terms\": {\"field\": \"");
+    sb.append(field);
+    sb.append("\"");
+    if (size != null) {
+      sb.append(", \"size\": ");
+      sb.append(size);
+    }
+
+    if (minThreshold != null) {
+      sb.append(", \"min_doc_count\": ").append(minThreshold);
+    }
+
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+  @Override
+  public String toString() {
+    return "{group-by: {field: " + field + ", size: " + size
+        + " minThreshold: " + minThreshold + "}}";
+  }
+
+}
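A sketch (assuming the inherited `field` and `size` members bind to the "field" and "size" JSON properties; the wrapper class name is invented):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.openecomp.sa.searchdbabstraction.searchapi.GroupByAggregation;

    public class GroupBySketch {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        GroupByAggregation group = mapper.readValue(
            "{\"field\": \"group\", \"size\": 10}", GroupByAggregation.class);
        // Expected fragment: "terms": {"field": "group", "size": 10}
        System.out.println(group.toElasticSearch());
      }
    }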
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/ParsedQuery.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/ParsedQuery.java
new file mode 100644 (file)
index 0000000..8b07d50
--- /dev/null
@@ -0,0 +1,120 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * This class represents a simple parsed query statement.
+ *
+ * <p>A 'parsed query' specifies a document field to inspect and a query
+ * string which will be parsed by the document store to generate the
+ * exact query to be performed.
+ *
+ * <p>The query string will be tokenized into 'terms' and 'operators' where:
+ *
+ * <p>Terms may be any of the following:
+ * <ul>
+ * <li> single words </li>
+ * <li> exact phrases (denoted by surrounding the phrase with '"' characters) </li>
+ * <li> regular expressions (denoted by surrounding the phrase with '/' characters) </li>
+ * </ul>
+ *
+ * <p>Operators may be any of the following:
+ * <ul>
+ * <li> +   -- The term to the right of the operator MUST be present to produce a match. </li>
+ * <li> -   -- The term to the right of the operator MUST NOT be present to produce a match. </li>
+ * <li> AND -- Both the terms to the left and right of the operator MUST be present to produce a match. </li>
+ * <li> OR  -- Either the term to the left or right of the operator MUST be present to produce a match. </li>
+ * <li> NOT -- The term to the right of the operator MUST NOT be present to produce a match. </li>
+ * </ul>
+ *
+ * <p>The expected JSON structure for a parsed query is as follows:
+ * <pre>
+ *     {
+ *         "parsed-query": {
+ *             "field": "fieldname",
+ *             "query-string": "string"
+ *         }
+ *     }
+ * </pre>
+ */
+public class ParsedQuery {
+
+  /**
+   * The name of the field which the query is to be applied to.
+   */
+  private String field;
+
+  /**
+   * The string to be parsed to generate the full query.
+   */
+  @JsonProperty("query-string")
+  private String queryString;
+
+
+  public String getField() {
+    return field;
+  }
+
+  public void setField(String field) {
+    this.field = field;
+  }
+
+  public String getQueryString() {
+    return queryString;
+  }
+
+  public void setQueryString(String queryString) {
+    this.queryString = queryString;
+  }
+
+
+  /**
+   * This method returns a string which represents this query in syntax
+   * that is understandable by ElasticSearch and is suitable for inclusion
+   * in an ElasticSearch query string.
+   *
+   * @return - ElasticSearch syntax string.
+   */
+  public String toElasticSearch() {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+    sb.append("\"query_string\": {");
+    sb.append("\"default_field\": \"").append(field).append("\", ");
+    sb.append("\"query\": \"").append(queryString).append("\"");
+    sb.append("}");
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+  @Override
+  public String toString() {
+    return "{field:" + field + ", query-string: '" + queryString + "'}";
+  }
+}
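A small sketch using the setters above (values invented). Note that toElasticSearch() performs no escaping, so a query string containing '"' characters would corrupt the generated JSON; callers are expected to pass clean input:

    import org.openecomp.sa.searchdbabstraction.searchapi.ParsedQuery;

    public class ParsedQuerySketch {
      public static void main(String[] args) {
        ParsedQuery pq = new ParsedQuery();
        pq.setField("description");
        pq.setQueryString("wireless +fast -legacy");
        // {"query_string": {"default_field": "description", "query": "wireless +fast -legacy"}}
        System.out.println(pq.toElasticSearch());
      }
    }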
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Query.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Query.java
new file mode 100644 (file)
index 0000000..1b9c1ed
--- /dev/null
@@ -0,0 +1,94 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+public class Query {
+
+  private QueryStatement may;
+  private QueryStatement must;
+
+  public QueryStatement getMay() {
+    return may;
+  }
+
+  public void setMay(QueryStatement may) {
+    this.may = may;
+  }
+
+  public QueryStatement getMust() {
+    return must;
+  }
+
+  public void setMust(QueryStatement must) {
+    this.must = must;
+  }
+
+  public QueryStatement getQueryStatement() {
+    if (isMust()) {
+      return must;
+    } else if (isMay()) {
+      return may;
+    } else {
+      return null;
+    }
+  }
+
+  public boolean isMust() {
+    return must != null;
+  }
+
+  public boolean isMay() {
+    return may != null;
+  }
+
+  public String toElasticSearch() {
+
+    if (isMust()) {
+      return must.toElasticSearch();
+    } else if (isMay()) {
+      return may.toElasticSearch();
+    } else {
+      return ""; // throw an exception?
+    }
+  }
+
+  @Override
+  public String toString() {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("Query:[");
+    if (isMust()) {
+      sb.append("must: ").append(must.toString());
+    } else if (isMay()) {
+      sb.append("may: ").append(may.toString());
+    } else {
+      sb.append("INVALID");
+    }
+    sb.append("]");
+
+    return sb.toString();
+  }
+}
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryStatement.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryStatement.java
new file mode 100644 (file)
index 0000000..f5fc367
--- /dev/null
@@ -0,0 +1,142 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class QueryStatement {
+
+  private TermQuery match;
+
+  @JsonProperty("not-match")
+  private TermQuery notMatch;
+
+  @JsonProperty("parsed-query")
+  private ParsedQuery parsedQuery;
+
+  private RangeQuery range;
+
+  public TermQuery getMatch() {
+    return match;
+  }
+
+  public void setMatch(TermQuery match) {
+    this.match = match;
+  }
+
+  public TermQuery getNotMatch() {
+    return notMatch;
+  }
+
+  public void setNotMatch(TermQuery notMatch) {
+    this.notMatch = notMatch;
+  }
+
+  public ParsedQuery getParsedQuery() {
+    return parsedQuery;
+  }
+
+  public void setParsedQuery(ParsedQuery parsedQuery) {
+    this.parsedQuery = parsedQuery;
+  }
+
+  public RangeQuery getRange() {
+    return range;
+  }
+
+  public void setRange(RangeQuery range) {
+    this.range = range;
+  }
+
+  public boolean isNotMatch() {
+    return (notMatch != null);
+  }
+
+  public String toElasticSearch() {
+
+    if (match != null) {
+      return match.toElasticSearch();
+
+    } else if (notMatch != null) {
+      return notMatch.toElasticSearch();
+
+    } else if (parsedQuery != null) {
+
+      // We need some special wrapping if this query is against a nested field.
+      if (fieldIsNested(parsedQuery.getField())) {
+        return "{\"nested\": { \"path\": \"" + pathForNestedField(parsedQuery.getField())
+            + "\", \"query\": " + parsedQuery.toElasticSearch() + "}}";
+      } else {
+        return parsedQuery.toElasticSearch();
+      }
+
+    } else if (range != null) {
+
+      // We need some special wrapping if this query is against a nested field.
+      if (fieldIsNested(range.getField())) {
+        return "{\"nested\": { \"path\": \"" + pathForNestedField(range.getField())
+            + "\", \"query\": " + range.toElasticSearch() + "}}";
+      } else {
+        return range.toElasticSearch();
+      }
+
+    } else {
+      // throw an exception?
+      return null;
+    }
+  }
+
+  private boolean fieldIsNested(String field) {
+    return field.contains(".");
+  }
+
+  private String pathForNestedField(String field) {
+    int index = field.lastIndexOf('.');
+    return field.substring(0, index);
+  }
+
+  @Override
+  public String toString() {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+
+    if (match != null) {
+      sb.append("TERM QUERY: { match: {").append(match.toString()).append("}}");
+    } else if (notMatch != null) {
+      sb.append("TERM QUERY: { not-match: {").append(match.toString()).append("}}");
+    } else if (parsedQuery != null) {
+      sb.append("PARSED QUERY: { ").append(parsedQuery.toString()).append("}");
+    } else if (range != null) {
+      sb.append("RANGE QUERY: { ").append(range.toString()).append("}");
+    } else {
+      sb.append("UNDEFINED");
+    }
+
+    sb.append("}");
+    return sb.toString();
+  }
+}
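A sketch of the nested-field wrapping (the field name is invented; RangeQuery is defined next in this commit):

    import org.openecomp.sa.searchdbabstraction.searchapi.QueryStatement;
    import org.openecomp.sa.searchdbabstraction.searchapi.RangeQuery;

    public class QueryStatementSketch {
      public static void main(String[] args) {
        RangeQuery range = new RangeQuery();
        range.setField("server.timestamp");
        range.setGte("2017-01-01T00:00:00");

        QueryStatement statement = new QueryStatement();
        statement.setRange(range);
        // "server.timestamp" is dotted, so the range query is wrapped as
        // {"nested": {"path": "server", "query": {"range": ...}}}
        System.out.println(statement.toElasticSearch());
      }
    }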
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/RangeQuery.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/RangeQuery.java
new file mode 100644 (file)
index 0000000..fcb0212
--- /dev/null
@@ -0,0 +1,348 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * This class represents a simple range query.
+ *
+ * <p>A range query is composed of one or more operator/value pairs which define
+ * the upper and lower bounds of the range, and a field to apply the query to.
+ *
+ * <p>Operators may be one of the following:
+ * <ul>
+ * <li>gt  - Greater than. </li>
+ * <li>gte - Greater than or equal to. </li>
+ * <li>lt  - Less than. </li>
+ * <li>lte - Less than or equal to. </li>
+ * </ul>
+ * Values may be either numeric values (Integer or Double) or Strings representing
+ * dates.
+ *
+ * <p>The following examples illustrate a couple of variants of the range query:
+ *
+ * <p><pre>
+ *     // A simple numeric range query:
+ *     {
+ *         "range": {
+ *             "field": "fieldname",
+ *             "gte": 5,
+ *             "lte": 10
+ *         }
+ *     }
+ *
+ *     // A simple date range query:
+ *     {
+ *         "range": {
+ *             "field": "fieldname",
+ *             "gt": "2016-10-06T00:00:00.558+03:00",
+ *             "lt": "2016-10-06T23:59:59.558+03:00"
+ *         }
+ *     }
+ * </pre>
+ */
+public class RangeQuery {
+
+  /**
+   * The name of the field to apply the range query against.
+   */
+  private String field;
+
+  /**
+   * The value of the field must be greater than this value to be a match.<br>
+   * NOTE: Only one of 'gt' or 'gte' should be set on any single {@link RangeQuery}
+   * instance.
+   */
+  private Object gt;
+
+  /**
+   * The value of the field must be greater than or equal to this value to be a match.<br>
+   * NOTE: Only one of 'gt' or 'gte' should be set on any single {@link RangeQuery}
+   * instance.
+   */
+  private Object gte;
+
+  /**
+   * The value of the field must be less than this value to be a match.<br>
+   * NOTE: Only one of 'lt' or 'lte' should be set on any single {@link RangeQuery}
+   * instance.
+   */
+  private Object lt;
+
+  /**
+   * The value of the field must be less than or equal to this value to be a match.<br>
+   * NOTE: Only one of 'lt' or 'lte' should be set on any single {@link RangeQuery}
+   * instance.
+   */
+  private Object lte;
+
+  private String format;
+
+  @JsonProperty("time-zone")
+  private String timeZone;
+
+  public String getField() {
+    return field;
+  }
+
+  public void setField(String field) {
+    this.field = field;
+  }
+
+  public Object getGt() {
+    return gt;
+  }
+
+  public void setGt(Object gt) {
+
+    // It does not make sense to assign a value to both the 'greater than'
+    // and 'greater than or equal' operations, so make sure we are not
+    // trying to do that.
+    if (gte == null) {
+
+      // Make sure that we are not trying to mix both numeric and date
+      // type values in the same queries.
+      if (((lt != null) && !typesMatch(gt, lt))
+          || ((lte != null) && !typesMatch(gt, lte))) {
+        throw new IllegalArgumentException("Cannot mix date and numeric values in the same ranged query");
+      }
+
+      // If we made it here, then we're all good.  Store the value.
+      this.gt = gt;
+    } else {
+      throw new IllegalArgumentException("Cannot assign both 'gt' and 'gte' fields in the same ranged query");
+    }
+  }
+
+
+  public Object getGte() {
+    return gte;
+  }
+
+  public void setGte(Object gte) {
+
+    // It does not make sense to assign a value to both the 'greater than'
+    // and 'greater than or equal' operations, so make sure we are not
+    // trying to do that.
+    if (gt == null) {
+
+      // Make sure that we are not trying to mix both numeric and date
+      // type values in the same queries.
+      if (((lt != null) && !typesMatch(gte, lt))
+          || ((lte != null) && !typesMatch(gte, lte))) {
+        throw new IllegalArgumentException("Cannot mix date and numeric values in the same ranged query");
+      }
+
+      // If we made it here, then we're all good.  Store the value.
+      this.gte = gte;
+
+    } else {
+      throw new IllegalArgumentException("Cannot assign both 'gt' and 'gte' fields in the same ranged query");
+    }
+  }
+
+  public Object getLt() {
+    return lt;
+  }
+
+  public void setLt(Object lt) {
+
+    // It does not make sense to assign a value to both the 'less than'
+    // and 'less than or equal' operations, so make sure we are not
+    // trying to do that.
+    if (lte == null) {
+
+      // Make sure that we are not trying to mix both numeric and date
+      // type values in the same queries.
+      if (((gt != null) && !typesMatch(lt, gt))
+          || ((gte != null) && !typesMatch(lt, gte))) {
+        throw new IllegalArgumentException("Cannot mix date and numeric values in the same ranged query");
+      }
+
+      // If we made it here, then we're all good.  Store the value.
+
+      this.lt = lt;
+    } else {
+      throw new IllegalArgumentException("Cannot assign both 'lt' and 'lte' fields in the same ranged query");
+    }
+  }
+
+  public Object getLte() {
+    return lte;
+  }
+
+  public void setLte(Object lte) {
+
+    // It does not make sense to assign a value to both the 'greater than'
+    // and 'greater than or equal' operations, so make sure we are not
+    // trying to do that.
+    if (lt == null) {
+
+      // Make sure that we are not trying to mix both numeric and date
+      // type values in the same queries.
+      if (((gt != null) && !typesMatch(lte, gt))
+          || ((gte != null) && !typesMatch(lte, gte))) {
+        throw new IllegalArgumentException("Cannot mix date and numeric values in the same ranged query");
+      }
+
+      // If we made it here, then we're all good.  Store the value.
+
+      this.lte = lte;
+    } else {
+      throw new IllegalArgumentException("Cannot assign both 'lt' and 'lte' fields in the same ranged query");
+    }
+  }
+
+  public String getFormat() {
+    return format;
+  }
+
+  public void setFormat(String format) {
+    this.format = format;
+  }
+
+  public String getTimeZone() {
+    return timeZone;
+  }
+
+  public void setTimeZone(String timeZone) {
+    this.timeZone = timeZone;
+  }
+
+  /**
+   * This convenience method determines whether or not the supplied
+   * value needs to be enclosed in '"' characters when generating
+   * ElasticSearch compatible syntax.
+   *
+   * @param val - The value to check.
+   * @return - A string representation of the value for inclusion
+   *     in an ElasticSearch syntax string.
+   */
+  private String formatStringOrNumericVal(Object val) {
+
+    if (val instanceof String) {
+      return "\"" + val.toString() + "\"";
+    } else {
+      return val.toString();
+    }
+  }
+
+
+  /**
+   * This convenience method verifies that the supplied objects are
+   * of classes considered to be compatible for a ranged query.
+   *
+   * @param value1 - The first value to check.
+   * @param value2 - The second value to check.
+   * @return - True if the two objects are compatible for inclusion in the
+   *     same ranged query, False, otherwise.
+   */
+  boolean typesMatch(Object value1, Object value2) {
+
+    return ((value1 instanceof String) && (value2 instanceof String))
+        || (!(value1 instanceof String) && !(value2 instanceof String));
+  }
+
+
+  /**
+   * This method returns a string which represents this query in syntax
+   * that is understandable by ElasticSearch and is suitable for inclusion
+   * in an ElasticSearch query string.
+   *
+   * @return - ElasticSearch syntax string.
+   */
+  public String toElasticSearch() {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+    sb.append("\"range\": {");
+    sb.append("\"").append(field).append("\": {");
+
+    // We may have one or zero of 'greater than' or 'greater
+    // than or equal'
+    boolean needComma = false;
+    if (gte != null) {
+      sb.append("\"gte\": ").append(formatStringOrNumericVal(gte));
+      needComma = true;
+    } else if (gt != null) {
+      sb.append("\"gt\": ").append(formatStringOrNumericVal(gt));
+      needComma = true;
+    }
+
+    // We may have one or zero of 'less than' or 'less
+    // than or equal'
+    if (lte != null) {
+      if (needComma) {
+        sb.append(", ");
+      }
+      sb.append("\"lte\": ").append(formatStringOrNumericVal(lte));
+    } else if (lt != null) {
+      if (needComma) {
+        sb.append(", ");
+      }
+      sb.append("\"lt\": ").append(formatStringOrNumericVal(lt));
+    }
+
+    // Append the format specifier if one was provided.
+    if (format != null) {
+      sb.append(", \"format\": \"").append(format).append("\"");
+    }
+
+    // Append the time zone specifier if one was provided.
+    if (timeZone != null) {
+      sb.append(", \"time_zone\": \"").append(timeZone).append("\"");
+    }
+
+    sb.append("}");
+    sb.append("}");
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+  @Override
+  public String toString() {
+
+    String str = "{ field: " + field + ", ";
+
+    if (gt != null) {
+      str += "gt: " + gt;
+    } else if (gte != null) {
+      str += "gte: " + gte;
+    }
+
+    if (lt != null) {
+      str += (((gt != null) || (gte != null)) ? ", " : "") + "lt: " + lt;
+    } else if (lte != null) {
+      str += (((gt != null) || (gte != null)) ? ", " : "") + "lte: " + lte;
+    }
+
+    str += "}";
+
+    return str;
+  }
+}
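A sketch of the setter guards (field name and values invented):

    import org.openecomp.sa.searchdbabstraction.searchapi.RangeQuery;

    public class RangeQuerySketch {
      public static void main(String[] args) {
        RangeQuery range = new RangeQuery();
        range.setField("fieldname");
        range.setGte(5);
        range.setLte(10);
        // {"range": {"fieldname": {"gte": 5, "lte": 10}}}
        System.out.println(range.toElasticSearch());

        // 'gte' is already set, so this throws IllegalArgumentException.
        range.setGt(3);
      }
    }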
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatement.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatement.java
new file mode 100644 (file)
index 0000000..e111163
--- /dev/null
@@ -0,0 +1,325 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.radeox.util.logging.Logger;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * This class represents the structure of a search statement.
+ *
+ * <p>The expected JSON structure to represent a search statement is as follows:
+ *
+ * <p><pre>
+ *     {
+ *         "results-start": int,  - Optional: index of starting point in result set.
+ *         "results-size": int,   - Optional: maximum number of documents to include in result set.
+ *
+ *         "filter": {
+ *             { filter structure - see {@link Filter} }
+ *         },
+ *
+ *         "queries": [
+ *             { query structure - see {@link QueryStatement} },
+ *             { query structure - see {@link QueryStatement} },
+ *                              .
+ *                              .
+ *             { query structure - see {@link QueryStatement} },
+ *         ],
+ *
+ *         "aggregations": [
+ *             { aggregation structure - see {@link AggregationStatement} },
+ *             { aggregation structure - see {@link AggregationStatement} },
+ *                              .
+ *                              .
+ *             { aggregation structure - see {@link AggregationStatement} },
+ *         ]
+ *     }
+ * </pre>
+ */
+public class SearchStatement {
+
+  /**
+   * Defines the filters that should be applied before running the
+   * actual queries.  This is optional.
+   */
+  private Filter filter;
+
+  /**
+   * The list of queries to be applied to the document store.
+   */
+  private Query[] queries;
+
+  /**
+   * The list of aggregations to be applied to the search
+   */
+  private Aggregation[] aggregations;
+
+  /**
+   * Defines the sort criteria to apply to the query result set.
+   * This is optional.
+   */
+  private Sort sort;
+
+  @JsonProperty("results-start")
+  private Integer resultsStart;
+
+  @JsonProperty("results-size")
+  private Integer size;
+
+  public Filter getFilter() {
+    return filter;
+  }
+
+  public void setFilter(Filter filter) {
+    this.filter = filter;
+  }
+
+  public Query[] getQueries() {
+    return queries;
+  }
+
+  public void setQueries(Query[] queries) {
+    this.queries = queries;
+  }
+
+  public Sort getSort() {
+    return sort;
+  }
+
+  public void setSort(Sort sort) {
+    this.sort = sort;
+  }
+
+  public boolean isFiltered() {
+    return filter != null;
+  }
+
+  public Aggregation[] getAggregations() {
+    return aggregations;
+  }
+
+  public void setAggregations(Aggregation[] aggregations) {
+    this.aggregations = aggregations;
+  }
+
+  public boolean hasAggregations() {
+    return aggregations != null && aggregations.length > 0;
+  }
+
+  public Integer getFrom() {
+    return resultsStart;
+  }
+
+  public void setFrom(Integer from) {
+    this.resultsStart = from;
+  }
+
+  public Integer getSize() {
+    return size;
+  }
+
+  public void setSize(Integer size) {
+    this.size = size;
+  }
+
+  /**
+   * This method returns a string which represents this statement in syntax
+   * that is understandable by ElasticSearch and is suitable for inclusion
+   * in an ElasticSearch query string.
+   *
+   * @return - ElasticSearch syntax string.
+   */
+  public String toElasticSearch() {
+
+    StringBuilder sb = new StringBuilder();
+    List<QueryStatement> notMatchQueries = new ArrayList<QueryStatement>();
+    List<QueryStatement> mustQueries = new ArrayList<QueryStatement>();
+    List<QueryStatement> shouldQueries = new ArrayList<QueryStatement>();
+
+    createQueryLists(queries, mustQueries, shouldQueries, notMatchQueries);
+
+    sb.append("{");
+
+    sb.append("\"version\": true,");
+
+    // If the client has specified an index into the results for the first
+    // document in the result set then include that in the ElasticSearch
+    // query.
+    if (resultsStart != null) {
+      sb.append("\"from\": ").append(resultsStart).append(", ");
+    }
+
+    // If the client has specified a maximum number of documents to be returned
+    // in the result set then include that in the ElasticSearch query.
+    if (size != null) {
+      sb.append("\"size\": ").append(size).append(", ");
+    }
+
+    sb.append("\"query\": {");
+    sb.append("\"bool\": {");
+
+    sb.append("\"must\": [");
+    AtomicBoolean firstQuery = new AtomicBoolean(true);
+    for (QueryStatement query : mustQueries) {
+
+      if (!firstQuery.compareAndSet(true, false)) {
+        sb.append(", ");
+      }
+
+      sb.append(query.toElasticSearch());
+    }
+    sb.append("], ");
+
+    sb.append("\"should\": [");
+
+    firstQuery = new AtomicBoolean(true);
+    for (QueryStatement query : shouldQueries) {
+
+      if (!firstQuery.compareAndSet(true, false)) {
+        sb.append(", ");
+      }
+
+      sb.append(query.toElasticSearch());
+    }
+
+    sb.append("],"); // close should list
+
+    sb.append("\"must_not\": [");
+    firstQuery.set(true);
+    for (QueryStatement query : notMatchQueries) {
+      if (!firstQuery.compareAndSet(true, false)) {
+        sb.append(", ");
+      }
+      sb.append(query.toElasticSearch());
+    }
+    sb.append("]");
+
+    // Add the filter stanza, if one is required.
+    if (isFiltered()) {
+      sb.append(", \"filter\": ").append(filter.toElasticSearch());
+    }
+
+    sb.append("}"); // close bool clause
+    sb.append("}"); // close query clause
+
+    // Add the sort directive, if one is required.
+    if (sort != null) {
+      sb.append(", \"sort\": ").append(sort.toElasticSearch());
+    }
+
+    // Add aggregations
+    if (hasAggregations()) {
+      sb.append(", \"aggs\": {");
+
+      for (int i = 0; i < aggregations.length; i++) {
+        if (i > 0) {
+          sb.append(",");
+        }
+        sb.append(aggregations[i].toElasticSearch());
+      }
+
+      sb.append("}");
+    }
+
+    sb.append("}");
+
+    Logger.debug("Generated raw ElasticSearch query statement: " + sb.toString());
+    return sb.toString();
+  }
+
+  private void createQueryLists(Query[] queries, List<QueryStatement> mustList,
+                                List<QueryStatement> mayList, List<QueryStatement> mustNotList) {
+
+    for (Query query : queries) {
+
+      if (query.isMust()) {
+
+        if (query.getQueryStatement().isNotMatch()) {
+          mustNotList.add(query.getQueryStatement());
+        } else {
+          mustList.add(query.getQueryStatement());
+        }
+      } else {
+
+        if (query.getQueryStatement().isNotMatch()) {
+          mustNotList.add(query.getQueryStatement());
+        } else {
+          mayList.add(query.getQueryStatement());
+        }
+      }
+    }
+  }
+
+
+  @Override
+  public String toString() {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("SEARCH STATEMENT: {");
+
+    if (resultsStart != null) {
+      sb.append("from: ").append(resultsStart).append(", ");
+    }
+
+    if (size != null) {
+      sb.append("size: ").append(size).append(", ");
+    }
+
+    if (filter != null) {
+      sb.append("filter: ").append(filter.toString()).append(", ");
+    }
+
+    sb.append("queries: [");
+    AtomicBoolean firstQuery = new AtomicBoolean(true);
+    if (queries != null) {
+      for (Query query : queries) {
+
+        if (!firstQuery.compareAndSet(true, false)) {
+          sb.append(", ");
+        }
+        sb.append(query.toString());
+      }
+    }
+    sb.append("]");
+
+    sb.append("aggregations: [");
+    firstQuery = new AtomicBoolean(true);
+
+    if (aggregations != null) {
+      for (Aggregation agg : aggregations) {
+
+        if (!firstQuery.compareAndSet(true, false)) {
+          sb.append(", ");
+        }
+        sb.append(agg.toString());
+      }
+    }
+    sb.append("]");
+
+    sb.append("]}");
+
+    return sb.toString();
+  }
+
+}
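Pulling the pieces together, a sketch of a complete statement built with the setters shown above (names and values invented):

    import org.openecomp.sa.searchdbabstraction.searchapi.*;

    public class SearchStatementSketch {
      public static void main(String[] args) {
        TermQuery term = new TermQuery();
        term.setField("entityType");
        term.setValue("pserver");

        QueryStatement statement = new QueryStatement();
        statement.setMatch(term);

        Query query = new Query();
        query.setMust(statement);

        SearchStatement search = new SearchStatement();
        search.setQueries(new Query[] {query});
        search.setSize(25);
        // Roughly: {"version": true, "size": 25, "query": {"bool": {"must": [...], ...}}}
        System.out.println(search.toElasticSearch());
      }
    }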
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Sort.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/Sort.java
new file mode 100644 (file)
index 0000000..968a7ad
--- /dev/null
@@ -0,0 +1,76 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+public class Sort {
+
+  private String field;
+  private SortDirection order = null;
+
+  public enum SortDirection {
+    ascending,
+    descending
+  }
+
+  public String getField() {
+    return field;
+  }
+
+  public void setField(String field) {
+    this.field = field;
+  }
+
+  public SortDirection getOrder() {
+    return order;
+  }
+
+  public void setOrder(SortDirection order) {
+    this.order = order;
+  }
+
+  public String toElasticSearch() {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{ \"").append(field).append("\": { \"order\": ");
+
+    // If a sort order wasn't explicitly supplied, default to 'ascending'.
+    if (order != null) {
+      switch (order) {
+        case ascending:
+          sb.append("\"asc\"}}");
+          break;
+        case descending:
+          sb.append("\"desc\"}}");
+          break;
+        default:
+      }
+    } else {
+      sb.append("\"asc\"}}");
+    }
+
+    return sb.toString();
+  }
+}
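A sketch of the two-branch serializer above (field name invented):

    import org.openecomp.sa.searchdbabstraction.searchapi.Sort;

    public class SortSketch {
      public static void main(String[] args) {
        Sort sort = new Sort();
        sort.setField("timestamp");
        sort.setOrder(Sort.SortDirection.descending);
        // { "timestamp": { "order": "desc"}}
        System.out.println(sort.toElasticSearch());
      }
    }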
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/TermQuery.java b/src/main/java/org/openecomp/sa/searchdbabstraction/searchapi/TermQuery.java
new file mode 100644 (file)
index 0000000..e4dba59
--- /dev/null
@@ -0,0 +1,349 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Arrays;
+
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * This class represents a simple term query.
+ *
+ * <p>A term query takes an operator, a field to apply the query to and a value to match
+ * against the query contents.
+ *
+ * <p>Valid operators include:
+ * <ul>
+ * <li> match - Field must contain the supplied value to produce a match. </li>
+ * <li> not-match - Field must NOT contain the supplied value to produce a match. </li>
+ * </ul>
+ * The following examples illustrate the structure of a few variants of the
+ * term query:
+ *
+ * <p><pre>
+ *     // Single Field Match Query:
+ *     {
+ *         "match": {"field": "searchTags", "value": "abcd"}
+ *     }
+ *
+ *     // Single Field Not-Match query:
+ *     {
+ *         "not-match": {"field": "searchTags", "value": "efgh"}
+ *     }
+ * </pre>
+ *
+ * <p><pre>
+ *     // Multi Field Match Query With A Single Value:
+ *     {
+ *         "match": {"field": "entityType searchTags", "value": "pserver"}
+ *     }
+ *
+ *     // Multi Field Match Query With Multiple Values:
+ *     {
+ *         "match": {"field": "entityType searchTags", "value": "pserver tenant"}
+ *     }
+ * </pre>
+ */
+public class TermQuery {
+
+  /**
+   * The name of the field to apply the term query to.
+   */
+  private String field;
+
+  /**
+   * The value which the field must contain in order to have a match.
+   */
+  private Object value;
+
+  /**
+   * For multi field queries only.  Determines the rules for whether or not a document matches
+   * the query, as follows:
+   *
+   * <p>"and" - At least one occurrence of every supplied value must be present in any of the
+   * supplied fields.
+   *
+   * <p>"or"  - At least one occurrence of any of the supplied values must be present in any of
+   * the supplied fields.
+   */
+  private String operator;
+
+  @JsonProperty("analyzer")
+  private String searchAnalyzer;
+
+
+  public String getField() {
+    return field;
+  }
+
+  public void setField(String field) {
+    this.field = field;
+  }
+
+  public Object getValue() {
+    return value;
+  }
+
+  public void setValue(Object value) {
+    this.value = value;
+  }
+
+  private boolean isNumericValue() {
+    return ((value instanceof Integer) || (value instanceof Double));
+  }
+
+  public String getOperator() {
+    return operator;
+  }
+
+  public void setOperator(String operator) {
+    this.operator = operator;
+  }
+
+  public String getSearchAnalyzer() {
+    return searchAnalyzer;
+  }
+
+  public void setSearchAnalyzer(String searchAnalyzer) {
+    this.searchAnalyzer = searchAnalyzer;
+  }
+
+  /**
+   * This method returns a string which represents this query in syntax
+   * that is understandable by ElasticSearch and is suitable for inclusion
+   * in an ElasticSearch query string.
+   *
+   * @return - ElasticSearch syntax string.
+   */
+  public String toElasticSearch() {
+
+    StringBuilder sb = new StringBuilder();
+
+    sb.append("{");
+
+    // Are we generating a multi field query?
+    if (isMultiFieldQuery()) {
+
+      // For multi field queries, we have to be careful about how we handle
+      // nested fields, so check to see if any of the specified fields are
+      // nested.
+      if (field.contains(".")) {
+
+        // Build the equivalent of a multi match query across one or more nested fields.
+        toElasticSearchNestedMultiMatchQuery(sb);
+
+      } else {
+
+        // Build a real multi match query, since we don't need to worry about nested fields.
+        toElasticSearchMultiFieldQuery(sb);
+      }
+    } else {
+
+      // Single field query.
+
+      // Add the necessary wrapping if this is a query against a nested field.
+      if (fieldIsNested(field)) {
+        sb.append("{\"nested\": { \"path\": \"").append(pathForNestedField(field))
+            .append("\", \"query\": ");
+      }
+
+      // Build the query.
+      toElasticSearchSingleFieldQuery(sb);
+
+      if (fieldIsNested(field)) {
+        sb.append("}}");
+      }
+    }
+
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+
+  /**
+   * Determines whether or not the client has specified a term query with
+   * multiple fields.
+   *
+   * @return - true if the query is referencing multiple fields, false, otherwise.
+   */
+  private boolean isMultiFieldQuery() {
+
+    return (field.split(" ").length > 1);
+  }
+
+
+  /**
+   * Constructs a single field term query in ElasticSearch syntax.
+   *
+   * @param sb - The string builder to assemble the query string with.
+   * @return - The single term query.
+   */
+  private void toElasticSearchSingleFieldQuery(StringBuilder sb) {
+
+    sb.append("\"term\": {\"").append(field).append("\" : ");
+
+    // For numeric values, don't enclose the value in quotes.
+    if (!isNumericValue()) {
+      sb.append("\"").append(value).append("\"");
+    } else {
+      sb.append(value);
+    }
+
+    sb.append("}");
+  }
+
+
+  /**
+   * Constructs a multi field query in ElasticSearch syntax.
+   *
+   * @param sb - The string builder to assemble the query string with.
+   */
+  private void toElasticSearchMultiFieldQuery(StringBuilder sb) {
+
+    sb.append("\"multi_match\": {");
+
+    sb.append("\"query\": \"").append(value).append("\", ");
+    sb.append("\"type\": \"cross_fields\",");
+    sb.append("\"fields\": [");
+
+    List<String> fields = Arrays.asList(field.split(" "));
+    AtomicBoolean firstField = new AtomicBoolean(true);
+    for (String f : fields) {
+      if (!firstField.compareAndSet(true, false)) {
+        sb.append(", ");
+      }
+      sb.append("\"").append(f.trim()).append("\"");
+    }
+    sb.append("],");
+
+    sb.append("\"operator\": \"").append((operator != null)
+        ? operator.toLowerCase() : "and").append("\"");
+
+    if (searchAnalyzer != null) {
+      sb.append(", \"analyzer\": \"").append(searchAnalyzer).append("\"");
+    }
+
+    sb.append("}");
+  }
+
+
+  /**
+   * Constructs the equivalent of an ElasticSearch multi match query across
+   * multiple nested fields.
+   *
+   * <p>Since ElasticSearch doesn't really let you do that, we have to be clever
+   * and construct an equivalent query using boolean operators to produce
+   * the same result.
+   *
+   * @param sb - The string builder to use to build the query.
+   */
+  public void toElasticSearchNestedMultiMatchQuery(StringBuilder sb) {
+
+    // Break out our whitespace delimited lists of fields and values into actual lists.
+    List<String> fields = Arrays.asList(field.split(" "));
+    // Note: this cast assumes that the value is a String; a numeric value
+    // would trigger a ClassCastException here.
+    List<String> values = Arrays.asList(((String) value).split(" "));
+
+    sb.append("\"bool\": {");
+
+    // Default to "and" (must) semantics when the operator is absent or
+    // unrecognized, so that we always emit a well-formed query.
+    if ((operator != null) && operator.equalsIgnoreCase("or")) {
+      sb.append("\"should\": [");
+    } else {
+      sb.append("\"must\": [");
+    }
+
+    AtomicBoolean firstField = new AtomicBoolean(true);
+    for (String f : fields) {
+
+      if (!firstField.compareAndSet(true, false)) {
+        sb.append(", ");
+      }
+
+      sb.append("{ ");
+
+      // Is this a nested field?
+      if (fieldIsNested(f)) {
+
+        sb.append("\"nested\": {");
+        sb.append("\"path\": \"").append(pathForNestedField(f)).append("\", ");
+        sb.append("\"query\": ");
+      }
+
+      sb.append("{\"bool\": {");
+      sb.append("\"should\": [");
+
+      AtomicBoolean firstValue = new AtomicBoolean(true);
+      for (String v : values) {
+        if (!firstValue.compareAndSet(true, false)) {
+          sb.append(", ");
+        }
+        sb.append("{\"match\": { \"");
+        sb.append(f).append("\": {\"query\": \"").append(v).append("\"");
+
+        if (searchAnalyzer != null) {
+          sb.append(", \"analyzer\": \"").append(searchAnalyzer).append("\"");
+        }
+        sb.append("}}}");
+      }
+
+      sb.append("]");
+      sb.append("}");
+
+      if (fieldIsNested(f)) {
+        sb.append("}");
+        sb.append("}");
+      }
+
+      sb.append("}");
+    }
+
+    sb.append("]");
+    sb.append("}");
+  }
+
+
+  @Override
+  public String toString() {
+    return "field: " + field + ", value: " + value
+        + ((value == null) ? "" : " (" + value.getClass().getName() + ")");
+  }
+
+  public boolean fieldIsNested(String field) {
+    return field.contains(".");
+  }
+
+  public String pathForNestedField(String field) {
+    int index = field.lastIndexOf('.');
+    return field.substring(0, index);
+  }
+}
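The nested multi-match translation above is easiest to follow with a worked
example.  Given field = "address.city address.state", value =
"seattle portland" and operator = "or" (the field names are illustrative
only, not from any real schema), toElasticSearch() emits the boolean
equivalent of a multi match query, shown here with whitespace added for
readability:

    {"bool": {"should": [
      { "nested": {"path": "address", "query": {"bool": {"should": [
          {"match": { "address.city": {"query": "seattle"}}},
          {"match": { "address.city": {"query": "portland"}}}]}}}},
      { "nested": {"path": "address", "query": {"bool": {"should": [
          {"match": { "address.state": {"query": "seattle"}}},
          {"match": { "address.state": {"query": "portland"}}}]}}}}
    ]}}

Each field gets its own "nested" clause wrapping a "should" list over the
values, and the outer "must"/"should" array reproduces the and/or semantics
documented on the operator field.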
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/service/SearchService.java b/src/main/java/org/openecomp/sa/searchdbabstraction/service/SearchService.java
new file mode 100644 (file)
index 0000000..7cbc101
--- /dev/null
@@ -0,0 +1,59 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.service;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.config.ElasticSearchConfig;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.ElasticSearchHttpController;
+import org.openecomp.sa.searchdbabstraction.logging.SearchDbMsgs;
+import org.openecomp.sa.searchdbabstraction.util.SearchDbConstants;
+
+import java.io.FileInputStream;
+import java.util.Properties;
+
+
+public class SearchService {
+  private ElasticSearchHttpController esController = null;
+
+  static Logger logger = LoggerFactory.getInstance().getLogger(SearchService.class.getName());
+
+  public SearchService() {
+    try {
+      start();
+    } catch (Exception e) {
+      logger.error(SearchDbMsgs.STARTUP_EXCEPTION, e.getLocalizedMessage());
+    }
+  }
+
+  protected void start() throws Exception {
+    Properties configProperties = new Properties();
+
+    // Load the configuration in a try-with-resources block so that the
+    // file handle is always closed.
+    try (FileInputStream configStream = new FileInputStream(SearchDbConstants.ES_CONFIG_FILE)) {
+      configProperties.load(configStream);
+    }
+    ElasticSearchConfig esConfig = new ElasticSearchConfig(configProperties);
+
+    esController = new ElasticSearchHttpController(esConfig);
+    logger.info(SearchDbMsgs.SERVICE_STARTED);
+  }
+}
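Because the constructor delegates all initialization to the overridable
start() method, a unit test can construct the service without a
configuration file or a live ElasticSearch instance.  A minimal sketch
(hypothetical test-side code, not part of this commit):

    // The constructor dispatches to the overridden start(), so neither
    // elastic-search.properties nor an ElasticSearch connection is needed.
    SearchService stubbedService = new SearchService() {
      @Override
      protected void start() {
        // Intentionally empty for testing.
      }
    };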
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/util/AggregationParsingUtil.java b/src/main/java/org/openecomp/sa/searchdbabstraction/util/AggregationParsingUtil.java
new file mode 100644 (file)
index 0000000..38ff943
--- /dev/null
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.util;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.openecomp.sa.searchdbabstraction.entity.AggregationBucket;
+import org.openecomp.sa.searchdbabstraction.entity.AggregationResult;
+
+import java.util.Set;
+
+public class AggregationParsingUtil {
+  public static AggregationResult[] parseAggregationResults(JSONObject aggregations)
+      throws JsonProcessingException {
+
+    // Obtain the set of aggregation names
+    Set<?> keySet = aggregations.keySet();
+    AggregationResult[] aggResults = new AggregationResult[keySet.size()];
+
+    int index = 0;
+    for (Object keyObject : keySet) {
+      String key = (String) keyObject;
+      AggregationResult aggResult = new AggregationResult();
+      aggResult.setName(key);
+
+      JSONObject bucketsOrNested = (JSONObject) aggregations.get(key);
+      Object buckets = bucketsOrNested.get("buckets");
+      if (buckets == null) {
+        // we have a nested
+        Number count = (Number) bucketsOrNested.remove("doc_count");
+        aggResult.setCount(count);
+        AggregationResult[] nestedResults = parseAggregationResults(bucketsOrNested);
+        aggResult.setNestedAggregations(nestedResults);
+      } else {
+        AggregationBucket[] aggBuckets = parseAggregationBuckets((JSONArray) buckets);
+        aggResult.setBuckets(aggBuckets);
+      }
+
+      aggResults[index] = aggResult;
+      index++;
+    }
+
+    return aggResults;
+
+  }
+
+  private static AggregationBucket[] parseAggregationBuckets(JSONArray buckets)
+      throws JsonProcessingException {
+    AggregationBucket[] aggBuckets = new AggregationBucket[buckets.size()];
+    for (int i = 0; i < buckets.size(); i++) {
+      AggregationBucket aggBucket = new AggregationBucket();
+      JSONObject bucketContent = (JSONObject) buckets.get(i);
+      Object key = bucketContent.remove("key");
+      aggBucket.setKey(key);
+      Object formatted = bucketContent.remove("key_as_string");
+      if (formatted != null) {
+        aggBucket.setFormattedKey((String) formatted);
+      }
+      Object count = bucketContent.remove("doc_count");
+      if (count != null) {
+        aggBucket.setCount((Number) count);
+      }
+      bucketContent.remove("from");
+      bucketContent.remove("from_as_string");
+      bucketContent.remove("to");
+      bucketContent.remove("to_as_string");
+
+
+      if (!bucketContent.entrySet().isEmpty()) {
+        // we have results from sub-aggregation
+        AggregationResult[] subResult = parseAggregationResults(bucketContent);
+        if (subResult != null) {
+          aggBucket.setSubAggregationResult(subResult);
+        }
+      }
+      aggBuckets[i] = aggBucket;
+    }
+
+    return aggBuckets;
+  }
+
+}
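To make the parsing contract concrete, consider this small sketch
(exception handling omitted; the getter names are assumed to mirror the
setters used above and are not confirmed by this excerpt):

    JSONParser parser = new JSONParser();
    JSONObject aggregations = (JSONObject) parser.parse(
        "{\"by-type\": {\"buckets\": ["
        + "{\"key\": \"router\", \"doc_count\": 12},"
        + "{\"key\": \"switch\", \"doc_count\": 7}]}}");

    AggregationResult[] results =
        AggregationParsingUtil.parseAggregationResults(aggregations);
    // results[0].getName()       -> "by-type"
    // results[0].getBuckets()[0] -> key "router", count 12
    // results[0].getBuckets()[1] -> key "switch", count 7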
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/util/DocumentSchemaUtil.java b/src/main/java/org/openecomp/sa/searchdbabstraction/util/DocumentSchemaUtil.java
new file mode 100644 (file)
index 0000000..579d201
--- /dev/null
@@ -0,0 +1,123 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.util;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.openecomp.sa.rest.DocumentFieldSchema;
+import org.openecomp.sa.rest.DocumentSchema;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+public class DocumentSchemaUtil {
+
+  public static String generateDocumentMappings(String documentSchema)
+      throws JsonParseException, JsonMappingException, IOException {
+
+    // Unmarshal the json content into a document schema object.
+    ObjectMapper mapper = new ObjectMapper();
+    DocumentSchema schema = mapper.readValue(documentSchema, DocumentSchema.class);
+
+    return generateDocumentMappings(schema);
+  }
+
+  public static String generateDocumentMappings(DocumentSchema schema) {
+
+    // Now, generate the Elastic Search mapping json and return it.
+    StringBuilder sb = new StringBuilder();
+    sb.append("{");
+    sb.append("\"properties\": {");
+
+    generateFieldMappings(schema.getFields(), sb);
+
+    sb.append("}");
+    sb.append("}");
+
+    return sb.toString();
+  }
+
+
+  private static void generateFieldMappings(List<DocumentFieldSchema> fields, StringBuilder sb) {
+
+    AtomicBoolean firstField = new AtomicBoolean(true);
+
+    for (DocumentFieldSchema field : fields) {
+
+      // If this isn't the first field in the list, prepend it with a ','
+      if (!firstField.compareAndSet(true, false)) {
+        sb.append(", ");
+      }
+
+      // Now, append the translated field contents.
+      generateFieldMapping(field, sb);
+    }
+  }
+
+  private static void generateFieldMapping(DocumentFieldSchema fieldSchema, StringBuilder sb) {
+
+    sb.append("\"").append(fieldSchema.getName()).append("\": {");
+
+    // The field type is mandatory.
+    sb.append("\"type\": \"").append(fieldSchema.getDataType()).append("\"");
+
+    // For date type fields we may optionally supply a format specifier.
+    if (fieldSchema.getDataType().equals("date") && (fieldSchema.getFormat() != null)) {
+      sb.append(", \"format\": \"").append(fieldSchema.getFormat()).append("\"");
+    }
+
+    // If the index field was specified, then append it.
+    if (fieldSchema.getSearchable() != null) {
+      sb.append(", \"index\": \"").append(fieldSchema.getSearchable()
+          ? "analyzed" : "not_analyzed").append("\"");
+    }
+
+    // If a search analyzer was specified, then append it.
+    if (fieldSchema.getSearchAnalyzer() != null) {
+      sb.append(", \"search_analyzer\": \"").append(fieldSchema.getSearchAnalyzer()).append("\"");
+    }
+
+    // If an indexing analyzer was specified, then append it.
+    if (fieldSchema.getIndexAnalyzer() != null) {
+      sb.append(", \"analyzer\": \"").append(fieldSchema.getIndexAnalyzer()).append("\"");
+    }
+
+
+    if (fieldSchema.getDataType().equals("nested")) {
+
+      sb.append(", \"properties\": {");
+      generateFieldMappings(fieldSchema.getSubFields(), sb);
+      sb.append("}");
+    }
+
+    sb.append("}");
+  }
+
+}
+
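The translation is easiest to see end to end.  Feeding a minimal schema
through generateDocumentMappings() produces the corresponding ElasticSearch
mapping (checked exceptions omitted; the schema field names "name",
"data-type", "searchable" and "search-analyzer" come from the
document-field json schema added later in this commit):

    String schemaJson =
        "{\"fields\": ["
        + "{\"name\": \"serverName\", \"data-type\": \"string\","
        + " \"searchable\": true, \"search-analyzer\": \"whitespace\"}]}";

    String mappings = DocumentSchemaUtil.generateDocumentMappings(schemaJson);
    // mappings -> {"properties": {"serverName": {"type": "string",
    //              "index": "analyzed", "search_analyzer": "whitespace"}}}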
diff --git a/src/main/java/org/openecomp/sa/searchdbabstraction/util/SearchDbConstants.java b/src/main/java/org/openecomp/sa/searchdbabstraction/util/SearchDbConstants.java
new file mode 100644 (file)
index 0000000..6790b24
--- /dev/null
@@ -0,0 +1,56 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.util;
+
+public class SearchDbConstants {
+  public static final String SDB_FILESEP = (System.getProperty("file.separator") == null) ? "/"
+      : System.getProperty("file.separator");
+  public static final String SDB_BUNDLECONFIG_NAME =
+      (System.getProperty("BUNDLECONFIG_DIR") == null)
+          ? "bundleconfig" : System.getProperty("BUNDLECONFIG_DIR");
+
+  public static final String SDB_HOME_BUNDLECONFIG = (System.getProperty("AJSC_HOME") == null)
+      ? SDB_FILESEP + "opt" + SDB_FILESEP + "app" + SDB_FILESEP + "searchdb"
+        + SDB_FILESEP + SDB_BUNDLECONFIG_NAME
+      : System.getProperty("AJSC_HOME") + SDB_FILESEP + SDB_BUNDLECONFIG_NAME;
+
+  public static final String SDB_HOME_ETC =
+      SDB_HOME_BUNDLECONFIG + SDB_FILESEP + "etc" + SDB_FILESEP;
+  public static final String SDB_CONFIG_APP_LOCATION = SDB_HOME_ETC + "appprops" + SDB_FILESEP;
+
+  // Elastic Search related
+  public static final String SDB_SPECIFIC_CONFIG = (System.getProperty("CONFIG_HOME") == null)
+      ? SDB_CONFIG_APP_LOCATION : System.getProperty("CONFIG_HOME") + SDB_FILESEP;
+  // Note: SDB_SPECIFIC_CONFIG always ends with a file separator, so no
+  // additional separator is inserted here.
+  public static final String ES_CONFIG_FILE = SDB_SPECIFIC_CONFIG + "elastic-search.properties";
+  public static final String SDB_AUTH = SDB_SPECIFIC_CONFIG + "auth" + SDB_FILESEP;
+  public static final String SDB_AUTH_CONFIG_FILENAME = SDB_AUTH + "search_policy.json";
+  public static final String SDB_FILTER_CONFIG_FILE = SDB_SPECIFIC_CONFIG + "filter-config.json";
+  public static final String SDB_ANALYSIS_CONFIG_FILE =
+      SDB_SPECIFIC_CONFIG + "analysis-config.json";
+
+  // Logging related
+  public static final String SDB_SERVICE_NAME = "SearchDataService";
+}
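For orientation, with none of the AJSC_HOME, BUNDLECONFIG_DIR, or
CONFIG_HOME system properties set, the defaults resolve as follows
(assuming a Unix-style file separator):

    SDB_HOME_BUNDLECONFIG -> /opt/app/searchdb/bundleconfig
    SDB_SPECIFIC_CONFIG   -> /opt/app/searchdb/bundleconfig/etc/appprops/
    ES_CONFIG_FILE        -> /opt/app/searchdb/bundleconfig/etc/appprops/elastic-search.properties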
diff --git a/src/main/resources/json/schema/analyzer.schema.json b/src/main/resources/json/schema/analyzer.schema.json
new file mode 100644 (file)
index 0000000..7592ee9
--- /dev/null
@@ -0,0 +1,29 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "title": "Analyzer",
+  "description": "The analyzer aggregates a tokenizer and multiple filters to describe how an input stream should be indexed.",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "description": {
+      "type": "string"
+    },
+    "behaviours": {
+      "type": "array",
+      "items": {
+        "type": "string"
+      }
+    },
+    "tokenizer": {
+      "type": "string"
+    },
+    "filters": {
+      "type": "array",
+      "items": {
+        "type": "string"
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/src/main/resources/json/schema/document-field.schema.json b/src/main/resources/json/schema/document-field.schema.json
new file mode 100644 (file)
index 0000000..2b79256
--- /dev/null
@@ -0,0 +1,37 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "title": "Document Field Schema",
+  "description": "Describes the structure of a document field for storage in a document store.",
+  "type": "object",
+  "javaType": "org.openecomp.sa.rest.DocumentFieldSchema",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "data-type": {
+      "type": "string"
+    },
+    "format": {
+      "type": "string"
+    },
+    "searchable": {
+      "type": "boolean"
+    },
+    "search-analyzer": {
+      "type": "string"
+    },
+    "index-analyzer": {
+      "type": "string"
+    },
+    "sub-fields": {
+      "type": "array",
+      "items": {
+        "$ref": "document-field.schema.json"
+      }
+    }
+  },
+  "required": [
+    "name",
+    "data-type"
+  ]
+}
\ No newline at end of file
diff --git a/src/main/resources/json/schema/document.schema.json b/src/main/resources/json/schema/document.schema.json
new file mode 100644 (file)
index 0000000..e2dd204
--- /dev/null
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "title": "Document Schema",
+  "description": "Describes the structure of a document for storage in a document store.",
+  "type": "object",
+  "javaType": "org.openecomp.sa.rest.DocumentSchema",
+  "properties": {
+    "fields": {
+      "type": "array",
+      "items": {
+        "$ref": "document-field.schema.json"
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/src/main/resources/json/schema/filter.schema.json b/src/main/resources/json/schema/filter.schema.json
new file mode 100644 (file)
index 0000000..6d1a030
--- /dev/null
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "title": "Filter",
+  "description": "Filters accept a stream of tokens from a tokenizer and apply additional rules, possibly producing additional tokens. ",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "description": {
+      "type": "string"
+    },
+    "configuration": {
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff --git a/src/main/resources/logging/SearchDbMsgs.properties b/src/main/resources/logging/SearchDbMsgs.properties
new file mode 100644 (file)
index 0000000..3e8de80
--- /dev/null
@@ -0,0 +1,123 @@
+#Resource key=Error Code|Message text|Resolution text|Description text
+#######
+#Newlines can be utilized to add some clarity ensuring continuing line
+#has at least one leading space
+#ResourceKey=\
+#             ERR0000E\
+#             Sample error msg txt\
+#             Sample resolution msg\
+#             Sample description txt
+#
+###### 
+#Error code classification category
+#000    Info/Debug
+#100    Permission errors
+#200    Availability errors/Timeouts
+#300    Data errors
+#400    Schema Interface type/validation errors
+#500    Business process errors
+#900    Unknown errors
+#
+########################################################################
+# INFO logs
+SERVICE_STARTED=\
+            SDB0001I|\
+            SearchDB Service started|\
+            NA|\
+            The SearchDB Service has been started
+ELASTIC_SEARCH_CONNECTION_SUCCESS=\
+            SDB0002I|\
+            Successfully established connection to ElasticSearch {0}|\
+            NA|\
+            Successfully established connection to ElasticSearch
+ELASTIC_SEARCH_CONNECTION_ATTEMPT=\
+            SDB0003I|\
+            Attempting to connect to ElasticSearch {0}|\
+            NA|\
+            An attempt is being made to establish connectivity to ElasticSearch
+CREATED_INDEX=\
+            SDB0019I|\
+            Index with name {0} created successfully
+DELETED_INDEX=\
+            SDB0021I|\
+            Deleted index with name {0}
+PROCESSED_BULK_OPERATIONS=\
+            SDB0022I|\
+            Successfully processed a bulk operations request.
+PROCESS_EVENT=\
+            SDB0041I|\
+            Processed event {0}.  Result: {1}
+PROCESS_INLINE_QUERY=\
+            SDB0042I|\
+            Processing inline query: {0}
+PROCESS_PAYLOAD_QUERY=\
+            SDB0043I|\
+            Processing query - operation: {0} against URL: {1}
+# INFO Level Metrics Logs
+CREATE_INDEX_TIME=\
+            SDB0020I|\
+            Create index request for index {0} document type {1}
+DELETE_INDEX_TIME=\
+            SDB0022I|\
+            Delete index request for index {0}
+CREATE_DOCUMENT_TIME=\
+            SDB0023|\
+            Create document in index {0}
+UPDATE_DOCUMENT_TIME=\
+            SDB0024|\
+            Update document in index {0} with id {1}
+DELETE_DOCUMENT_TIME=\
+            SDB0025|\
+            Delete document in index {0} with id {1}
+GET_DOCUMENT_TIME=\
+            SDB0026|\
+            Get document from index {0} with id {1}
+QUERY_DOCUMENT_TIME=\
+            SDB0027|\
+            Query request for index {0} with query string {1}
+BULK_OPERATIONS_TIME=\
+            SDB0028|\
+            Bulk operations request
+CHECK_INDEX_TIME=\
+            SDB0029|\
+            Check for index {0}
+# WARNING logs
+INDEX_CREATE_FAILURE=\
+            SDB0301W|\
+            Failed to create index with index name = {0}. Cause: {1}|\
+            Check cause.  It is possible the index already exists.|\
+            Failed to create the specified index
+INDEX_DELETE_FAILURE=\
+            SDB0302W|\
+            Failed to delete index with index name = {0}. Cause: {1}|\
+            Check cause.  It is possible the index does not exist.|\
+            Failed to delete the specified index
+FILTERS_CONFIG_FAILURE=\
+            SDB0303W|\
+            Failed to read filter configuration from file {0}. Cause: {1}
+ANALYSYS_CONFIG_FAILURE=\
+            SDB0304W|\
+            Failed to read analysis configuration from file {0}. Cause: {1}
+GET_ANALYZERS_FAILURE=\
+            SDB0305W|\
+            Failed to get analyzer definitions.  Cause: {0}
+BULK_OPERATION_FAILURE=\
+            SDB0306W|\
+            Failed to execute bulk operations.  Cause: {0}
+# ERROR logs
+ELASTIC_SEARCH_CONNECTION_FAILURE=\
+            SDB0501E|\
+            Failed to establish connection to ElasticSearch {0}. Cause: {1}|\
+            Check connectivity with ElasticSearch.  Ensure ElasticSearch is up and reachable.|\
+            A connectivity check to ElasticSearch has failed.
+EXCEPTION_DURING_METHOD_CALL=\
+            SDB0502E|\
+            Failed to execute method {0} due to: {1}|\
+            Check error cause|\
+            Method failed to execute
+PROCESS_REST_REQUEST=\
+            SDB0503E|\
+            Received request {0} {1} from {2}.  Sending response: {3}
+STARTUP_EXCEPTION=\
+            SDB0504E|\
+            Exception encountered during startup of search service: {0}
diff --git a/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context b/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context
new file mode 100644 (file)
index 0000000..8514196
--- /dev/null
@@ -0,0 +1 @@
+{"context":{"contextClass":"ajsc.Context","contextId":"__module_ajsc_namespace_name__:__module_ajsc_namespace_version__","contextName":"__module_ajsc_namespace_name__","contextVersion":"__module_ajsc_namespace_version__","description":"__module_ajsc_namespace_name__ Context"}}
\ No newline at end of file
diff --git a/src/main/runtime/context/default#0.context b/src/main/runtime/context/default#0.context
new file mode 100644 (file)
index 0000000..d1b5ab4
--- /dev/null
@@ -0,0 +1 @@
+{"context":{"contextClass":"ajsc.Context","contextId":"default:0","contextName":"default","contextVersion":"0","description":"Default Context"}}
\ No newline at end of file
diff --git a/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json b/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json
new file mode 100644 (file)
index 0000000..76b27bd
--- /dev/null
@@ -0,0 +1,10 @@
+{
+  "deploymentPackage": {
+    "Class": "ajsc.DeploymentPackage",
+    "Id": "__module.ajsc.namespace.name__:__module_ajsc_namespace_version__",
+    "namespace": "__module_ajsc_namespace_name__",
+    "namespaceVersion": "__module_ajsc_namespace_version__",
+    "description": "__module_ajsc_namespace_name__ __module_ajsc_namespace_version__ - default description",
+    "userId": "ajsc"
+  }
+}
\ No newline at end of file
diff --git a/src/main/runtime/shiroRole/ajscadmin.json b/src/main/runtime/shiroRole/ajscadmin.json
new file mode 100644 (file)
index 0000000..8eb9c3d
--- /dev/null
@@ -0,0 +1,6 @@
+{
+  "shiroRoleClass": "ajsc.auth.ShiroRole",
+  "shiroRoleId": "ajscadmin",
+  "name": "ajscadmin",
+  "permissions": "[ajscadmin:*, ajsc:*]"
+}
\ No newline at end of file
diff --git a/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json b/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json
new file mode 100644 (file)
index 0000000..cb56d0c
--- /dev/null
@@ -0,0 +1,6 @@
+{
+  "shiroRoleClass": "ajsc.auth.ShiroRole",
+  "shiroRoleId": "contextadmin:__module_ajsc_namespace_name__",
+  "name": "contextadmin:__module_ajsc_namespace_name__",
+  "permissions": "[]"
+}
\ No newline at end of file
diff --git a/src/main/runtime/shiroRole/contextadmin#default.json b/src/main/runtime/shiroRole/contextadmin#default.json
new file mode 100644 (file)
index 0000000..e8e4dbc
--- /dev/null
@@ -0,0 +1,6 @@
+{
+  "shiroRoleClass": "ajsc.auth.ShiroRole",
+  "shiroRoleId": "contextadmin:default",
+  "name": "contextadmin:default",
+  "permissions": "[]"
+}
\ No newline at end of file
diff --git a/src/main/runtime/shiroUser/ajsc.json b/src/main/runtime/shiroUser/ajsc.json
new file mode 100644 (file)
index 0000000..033bdee
--- /dev/null
@@ -0,0 +1,7 @@
+{
+  "shiroUserClass": "ajsc.auth.ShiroUser",
+  "shiroUserId": "ajsc",
+  "passwordHash": "9471697417008c880720ba54c6038791ad7e98f3b88136fe34f4d31a462dd27a",
+  "permissions": "[*:*]",
+  "username": "ajsc"
+}
\ No newline at end of file
diff --git a/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json b/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json
new file mode 100644 (file)
index 0000000..e680d84
--- /dev/null
@@ -0,0 +1,6 @@
+{
+  "shiroUserRoleClass": "ajsc.auth.ShiroUserRole",
+  "shiroUserRoleId": "ajsc:ajscadmin",
+  "roleId": "ajscadmin",
+  "userId": "ajsc"
+}
\ No newline at end of file
diff --git a/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json b/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json
new file mode 100644 (file)
index 0000000..836c320
--- /dev/null
@@ -0,0 +1,6 @@
+{
+  "shiroUserRoleClass": "ajsc.auth.ShiroUserRole",
+  "shiroUserRoleId": "ajsc:contextadmin:__module_ajsc_namespace_name__",
+  "roleId": "contextadmin:__module_ajsc_namespace_name__",
+  "userId": "ajsc"
+}
\ No newline at end of file
diff --git a/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json b/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json
new file mode 100644 (file)
index 0000000..590857a
--- /dev/null
@@ -0,0 +1,6 @@
+{
+  "shiroUserRoleClass": "ajsc.auth.ShiroUserRole",
+  "shiroUserRoleId": "ajsc:contextadmin:default",
+  "roleId": "contextadmin:default",
+  "userId": "ajsc"
+}
\ No newline at end of file
diff --git a/src/test/java/org/openecomp/sa/rest/ApiUtilsTest.java b/src/test/java/org/openecomp/sa/rest/ApiUtilsTest.java
new file mode 100644 (file)
index 0000000..b5bc314
--- /dev/null
@@ -0,0 +1,46 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+
+public class ApiUtilsTest {
+
+  @Test
+  public void testHTTPStatusConversion() {
+    String statusString = ApiUtils.getHttpStatusString(201);
+    assertEquals(0, statusString.compareToIgnoreCase("Created"));
+
+    statusString = ApiUtils.getHttpStatusString(207);
+    assertEquals(0, statusString.compareToIgnoreCase("Multi Status"));
+
+    statusString = ApiUtils.getHttpStatusString(9999);
+    assertEquals(0, statusString.compareToIgnoreCase("Unknown"));
+  }
+
+}
diff --git a/src/test/java/org/openecomp/sa/rest/BulkApiTest.java b/src/test/java/org/openecomp/sa/rest/BulkApiTest.java
new file mode 100644 (file)
index 0000000..483ef33
--- /dev/null
@@ -0,0 +1,109 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.test.JerseyTest;
+import org.junit.Test;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.core.Application;
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+
+/**
+ * This suite of tests validates the behaviour of the bulk operations REST
+ * end point.
+ */
+public class BulkApiTest extends JerseyTest {
+
+  private final String TOP_URI = "/test/bulk/";
+
+
+  @Override
+  protected Application configure() {
+
+    // Make sure that our test endpoint is on the resource path
+    // for Jersey Test.
+    return new ResourceConfig(SearchServiceApiHarness.class);
+  }
+
+
+  /**
+   * This test validates that the expected response codes are returned
+   * to the client in the event of an authentication failure.
+   */
+  @Test
+  public void authenticationFailureTest() {
+
+    // Send a request to the end point, with a special trigger in the
+    // payload that tells our test harness to force the authentication
+    // to fail.
+    Response result = target(TOP_URI).request().post(Entity.json(SearchServiceApiHarness.FAIL_AUTHENTICATION_TRIGGER), Response.class);
+
+    // Validate that a failure to authenticate results in the expected
+    // response code returned to the client.
+    assertEquals(Response.Status.FORBIDDEN.getStatusCode(), result.getStatus());
+  }
+
+
+  /**
+   * This test validates that properly constructed json payloads are
+   * accepted and that improperly constructed payloads are rejected
+   * with the appropriate response code returned to the client.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void payloadValidationTest() throws IOException {
+
+    // Post a request to the bulk operations endpoint with a valid
+    // operations list payload.
+    File validBulkOpsFile = new File("src/test/resources/json/bulk-ops-valid.json");
+    String validPayloadStr = TestUtils.readFileToString(validBulkOpsFile);
+    Response validResult = target(TOP_URI).request().post(Entity.json(validPayloadStr), Response.class);
+
+    // Validate that the payload is accepted as expected.
+    assertEquals("Valid operations payload was rejected",
+        Response.Status.OK.getStatusCode(), validResult.getStatus());
+
+    // Post a request to the bulk operations endpoint with an invalid
+    // operations list payload.
+    File inValidBulkOpsFile = new File("src/test/resources/json/bulk-ops-invalid.json");
+    String inValidPayloadStr = TestUtils.readFileToString(inValidBulkOpsFile);
+    Response invalidResult = target(TOP_URI).request().post(Entity.json(inValidPayloadStr), Response.class);
+
+    // Validate that the payload is rejected as expected.
+    assertEquals("Invalid operations payload was not rejected",
+        Response.Status.BAD_REQUEST.getStatusCode(), invalidResult.getStatus());
+  }
+}
diff --git a/src/test/java/org/openecomp/sa/rest/DocumentApiTest.java b/src/test/java/org/openecomp/sa/rest/DocumentApiTest.java
new file mode 100644 (file)
index 0000000..f57bf30
--- /dev/null
@@ -0,0 +1,206 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.test.JerseyTest;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.Invocation.Builder;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Application;
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+public class DocumentApiTest extends JerseyTest {
+
+  private static final String INDEXES_URI = "/test/indexes/";
+  private static final String DOCUMENT_URI = "documents/";
+
+  private static final String SEARCH_URI = "query/";
+  private static final String INDEX_NAME = "test-index";
+  private static final String DOC_ID = "test-1";
+  private static final String SIMPLE_QUERY = "\"parsed-query\": {\"my-field\": \"something\", \"query-string\": \"string\"}";
+  private static final String COMPLEX_QUERY =
+      "{"
+          + "\"filter\": {"
+          + "\"all\": ["
+          + "{\"match\": {\"field\": \"searchTags\", \"value\": \"a\"}}"
+          + "]"
+          + "},"
+          + "\"queries\": ["
+          + "{\"may\": {\"parsed-query\": {\"field\": \"searchTags\", \"query-string\": \"b\"}}}"
+          + "]"
+          + "}";
+
+  private static final String CREATE_JSON_CONTENT = "creation content";
+
+
+  @Override
+  protected Application configure() {
+
+    // Make sure that our test endpoint is on the resource path
+    // for Jersey Test.
+    return new ResourceConfig(SearchServiceApiHarness.class);
+  }
+
+  /**
+   * This test validates the behaviour of the 'Create Document' POST request
+   * endpoint.
+   *
+   * @throws IOException
+   * @throws ParseException
+   */
+  @Test
+  public void createDocumentTest() throws IOException, ParseException {
+    String result = target(INDEXES_URI + INDEX_NAME + "/" + DOCUMENT_URI).request().post(Entity.json(CREATE_JSON_CONTENT), String.class);
+
+
+    // Our stub document store DAO returns the parameters that it was
+    // passed as the result string, so now we can validate that our
+    // endpoint invoked it with the correct parameters.
+
+    JSONParser parser = new JSONParser();
+    JSONObject json = (JSONObject) parser.parse(result);
+
+    assertTrue("Unexpected Result ", !json.get("etag").toString().isEmpty());
+  }
+
+  /**
+   * This test validates the behaviour of the 'Create Document' PUT request
+   * endpoint.
+   *
+   * @throws IOException
+   * @throws ParseException
+   */
+  @Test
+  public void updateDocumentTest() throws IOException, ParseException {
+    WebTarget target = target(INDEXES_URI + INDEX_NAME + "/" + DOCUMENT_URI + DOC_ID);
+    Builder request = target.request().header("If-Match", "1");
+    String result = request.put(Entity.json(CREATE_JSON_CONTENT), String.class);
+
+    // Our stub document store DAO returns the parameters that it was
+    // passed as the result string, so now we can validate that our
+    // endpoint invoked it with the correct parameters.
+    JSONParser parser = new JSONParser();
+    JSONObject json = (JSONObject) parser.parse(result);
+
+    assertTrue("Unexpected Result ", !json.get("etag").toString().isEmpty());
+  }
+
+  /**
+   * This test validates the behaviour of the 'Get Document' GET request
+   * endpoint.
+   *
+   * @throws IOException
+   * @throws ParseException
+   */
+  @Test
+  public void getDocumentTest() throws IOException, ParseException {
+    String result = target(INDEXES_URI + INDEX_NAME + "/" + DOCUMENT_URI + DOC_ID).request().get(String.class);
+
+    // Our stub document store DAO returns the parameters that it was
+    // passed as the result string, so now we can validate that our
+    // endpoint invoked it with the correct parameters.
+    JSONParser parser = new JSONParser();
+    JSONObject json = (JSONObject) parser.parse(result);
+
+    assertTrue("Unexpected Result ", !json.get("etag").toString().isEmpty());
+
+  }
+
+  /**
+   * This test validates the behaviour of the 'Delete Document' DELETE request
+   * endpoint.
+   *
+   * @throws IOException
+   * @throws ParseException
+   */
+  @Test
+  public void deleteDocumentTest() throws IOException, ParseException {
+    WebTarget target = target(INDEXES_URI + INDEX_NAME + "/" + DOCUMENT_URI + DOC_ID);
+    Builder request = target.request().header("If-Match", "1");
+    String result = request.delete(String.class);
+
+
+    // Our stub document store DAO returns the parameters that it was
+    // passed as the result string, so now we can validate that our
+    // endpoint invoked it with the correct parameters.
+    assertTrue("Unexpected Result ", result.isEmpty());
+
+  }
+
+  /**
+   * This test validates the behaviour of the 'Search Documents' GET request
+   * endpoint.
+   *
+   * @throws IOException
+   * @throws ParseException
+   */
+  @Ignore
+  @Test
+  public void searchDocumentTest1() throws IOException, ParseException {
+    String result = target(INDEXES_URI + INDEX_NAME + "/" + SEARCH_URI + SIMPLE_QUERY).request().get(String.class);
+
+    // Our stub document store DAO returns the parameters that it was
+    // passed as the result string, so now we can validate that our
+    // endpoint invoked it with the correct parameters.
+    JSONParser parser = new JSONParser();
+    JSONObject json = (JSONObject) parser.parse(result);
+
+    assertTrue("Unexpected Result ", json.get("totalHits").toString().equals("1"));
+
+
+  }
+
+  /**
+   * This test validates the behaviour of the 'Search Documents' GET request
+   * endpoint.
+   *
+   * @throws IOException
+   * @throws ParseException
+   */
+  @Test
+  public void searchDocumentTest2() throws IOException, ParseException {
+    String result = target(INDEXES_URI + INDEX_NAME + "/" + SEARCH_URI).request().post(Entity.json(COMPLEX_QUERY), String.class);
+
+    // Our stub document store DAO returns the parameters that it was
+    // passed as the result string, so now we can validate that our
+    // endpoint invoked it with the correct parameters.
+    JSONParser parser = new JSONParser();
+    JSONObject json = (JSONObject) parser.parse(result);
+    JSONObject resultJson = (JSONObject) json.get("searchResult");
+
+    assertTrue("Unexpected Result ", resultJson.get("totalHits").toString().equals("1"));
+
+  }
+
+}
diff --git a/src/test/java/org/openecomp/sa/rest/DocumentSchemaTest.java b/src/test/java/org/openecomp/sa/rest/DocumentSchemaTest.java
new file mode 100644 (file)
index 0000000..fff36b9
--- /dev/null
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+
+public class DocumentSchemaTest {
+
+  private final String SIMPLE_DOC_SCHEMA_JSON = "src/test/resources/json/simpleDocument.json";
+  private final String NESTED_DOC_SCHEMA_JSON = "src/test/resources/json/nested-document.json";
+
+
+  /**
+   * This test validates that we convert document definitions back and
+   * forth between json strings and POJOs without any loss of data.
+   *
+   * @throws com.fasterxml.jackson.core.JsonParseException
+   * @throws com.fasterxml.jackson.databind.JsonMappingException
+   * @throws IOException
+   */
+  @Test
+  public void simpleDocSchemaFromJsonFileTest() throws JsonParseException, JsonMappingException, IOException {
+
+    // Import our json format document schema from a file.
+    File schemaFile = new File(SIMPLE_DOC_SCHEMA_JSON);
+    String fileString = TestUtils.readFileToString(schemaFile);
+
+    // Unmarshall that to a Java POJO
+    ObjectMapper mapper = new ObjectMapper();
+    DocumentSchema docSchema = mapper.readValue(schemaFile, DocumentSchema.class);
+
+    // Now, for the purposes of comparison, produce a JSON string from
+    // our Java object.
+    String jsonString = mapper.writeValueAsString(docSchema);
+
+    // Assert that the raw JSON that we read from the file matches the marshalled
+    // JSON we generated from our Java object (ie: validate that we didn't lose
+    // anything going in either direction).
+    assertTrue("Marshalled object does not match the original json source that produced it",
+        fileString.equals(jsonString));
+  }
+
+
+  /**
+   * This test validates that we convert document definitions back and
+   * forth between json strings and POJOs without any loss of data in
+   * the case of document schemas which contain nested fields.
+   *
+   * @throws com.fasterxml.jackson.core.JsonParseException
+   * @throws com.fasterxml.jackson.databind.JsonMappingException
+   * @throws IOException
+   */
+  @Test
+  public void nestedDocSchemaFromJsonFileTest() throws JsonParseException, JsonMappingException, IOException {
+
+    // Import our json format document schema from a file.
+    File schemaFile = new File(NESTED_DOC_SCHEMA_JSON);
+    String fileString = TestUtils.readFileToString(schemaFile);
+
+    // Unmarshall that to a Java POJO
+    ObjectMapper mapper = new ObjectMapper();
+    DocumentSchema docSchema = mapper.readValue(schemaFile, DocumentSchema.class);
+
+    String jsonString = mapper.writeValueAsString(docSchema);
+
+    // Assert that the raw JSON that we read from the file matches the marshalled
+    // JSON we generated from our Java object (ie: validate that we didn't lose
+    // anything going in either direction).
+    assertTrue("Marshalled object does not match the original json source that produced it",
+        fileString.equals(jsonString));
+  }
+}
\ No newline at end of file
diff --git a/src/test/java/org/openecomp/sa/rest/IndexApiTest.java b/src/test/java/org/openecomp/sa/rest/IndexApiTest.java
new file mode 100644 (file)
index 0000000..b969ab6
--- /dev/null
@@ -0,0 +1,228 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.test.JerseyTest;
+import org.junit.Test;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
+import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.core.Application;
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+
+/**
+ * This suite of tests is intended to exercise the set of REST endpoints
+ * associated with manipulating Indexes in the document store.
+ */
+public class IndexApiTest extends JerseyTest {
+
+  private final String TOP_URI = "/test/indexes/";
+  private final String SIMPLE_DOC_SCHEMA_JSON = "src/test/resources/json/simpleDocument.json";
+
+
+  @Override
+  protected Application configure() {
+
+    // Make sure that our test endpoint is on the resource path
+    // for Jersey Test.
+    return new ResourceConfig(SearchServiceApiHarness.class);
+  }
+
+
+  /**
+   * This test validates that the {@link IndexApi} is able to convert {@link OperationResult}
+   * objects to standard REST {@link Response} objects.
+   *
+   * @throws FileNotFoundException
+   * @throws IOException
+   * @throws DocumentStoreOperationException
+   */
+  @Test
+  public void responseFromOperationResultTest() throws FileNotFoundException, IOException, DocumentStoreOperationException {
+
+    int SUCCESS_RESULT_CODE = 200;
+    String SUCCESS_RESULT_STRING = "Everything is ay-okay!";
+    int FAILURE_RESULT_CODE = 500;
+    String FAILURE_CAUSE_STRING = "Something went wrong!";
+
+
+    // Create an instance of the index API endpoint that we will test against.
+    // We will override the init() method because we don't want it to try to
+    // connect to a real document store.
+    IndexApi indexApi = new IndexApi(new SearchServiceApiHarness()) {
+      @Override
+      public void init() { /* do nothing */ }
+    };
+
+    //Construct an OperationResult instance with a success code and string.
+    OperationResult successResult = new OperationResult();
+    successResult.setResultCode(SUCCESS_RESULT_CODE);
+    successResult.setResult(SUCCESS_RESULT_STRING);
+
+    // Convert our success OperationResult to a standard REST Response...
+    Response successResponse = indexApi.responseFromOperationResult(successResult);
+
+    // ...and validate that the Response is correctly populated.
+    assertEquals("Unexpected result code", SUCCESS_RESULT_CODE, successResponse.getStatus());
+    assertTrue("Incorrect result string", ((String) successResponse.getEntity()).equals(SUCCESS_RESULT_STRING));
+
+    // Construct an OperationResult instance with an error code and failure
+    // cause.
+    OperationResult failureResult = new OperationResult();
+    failureResult.setResultCode(FAILURE_RESULT_CODE);
+    failureResult.setFailureCause(FAILURE_CAUSE_STRING);
+
+    // Convert our failure OperationResult to a standard REST Response...
+    Response failureResponse = indexApi.responseFromOperationResult(failureResult);
+
+    // ...and validate that the Response is correctly populated.
+    assertEquals("Unexpected result code", FAILURE_RESULT_CODE, failureResponse.getStatus());
+    assertTrue("Incorrect result string", ((String) failureResponse.getEntity()).equals(FAILURE_CAUSE_STRING));
+  }
+
+
+  /**
+   * This test validates the behaviour of the 'Create Index' POST request
+   * endpoint.
+   *
+   * @throws IOException
+   */
+  @Test
+  public void createIndexTest() throws IOException {
+
+    String INDEX_NAME = "test-index";
+    String EXPECTED_SETTINGS =
+        "{\"analysis\": "
+            + "{\"filter\": "
+            + "{\"nGram_filter\": { "
+            + "\"type\": \"nGram\", "
+            + "\"min_gram\": 1, "
+            + "\"max_gram\": 50, "
+            + "\"token_chars\": [ \"letter\", \"digit\", \"punctuation\", \"symbol\" ]}},"
+            + "\"analyzer\": {"
+            + "\"nGram_analyzer\": "
+            + "{\"type\": \"custom\","
+            + "\"tokenizer\": \"whitespace\","
+            + "\"filter\": [\"lowercase\",\"asciifolding\",\"nGram_filter\"]},"
+            + "\"whitespace_analyzer\": "
+            + "{\"type\": \"custom\","
+            + "\"tokenizer\": \"whitespace\","
+            + "\"filter\": [\"lowercase\",\"asciifolding\"]}}}}";
+    String EXPECTED_MAPPINGS =
+        "{\"properties\": {"
+            + "\"serverName\": {"
+            + "\"type\": \"string\", "
+            + "\"index\": \"analyzed\", "
+            + "\"search_analyzer\": \"whitespace\"}, "
+            + "\"serverComplex\": {"
+            + "\"type\": \"string\", "
+            + "\"search_analyzer\": \"whitespace\"}}}";
+
+    // Read a valid document schema from a json file.
+    File schemaFile = new File(SIMPLE_DOC_SCHEMA_JSON);
+    String documentJson = TestUtils.readFileToString(schemaFile);
+
+    // Send a request to our 'create index' endpoint, using the schema
+    // which we just read.
+    String result = target(TOP_URI + INDEX_NAME).request().put(Entity.json(documentJson), String.class);
+
+
+    // Our stub document store DAO returns the parameters that it was
+    // passed as the result string, so now we can validate that our
+    // endpoint invoked it with the correct parameters.
+    String[] tokenizedResult = result.split("@");
+    assertTrue("Unexpected Index Name '" + tokenizedResult[0] + "' passed to doc store DAO",
+        tokenizedResult[0].equals(INDEX_NAME));
+    assertTrue("Unexpected settings string '" + tokenizedResult[1] + "' passed to doc store DAO",
+        tokenizedResult[1].equals(EXPECTED_SETTINGS));
+    assertTrue("Unexpected mappings string '" + tokenizedResult[2] + "' passed to doc store DAO",
+        tokenizedResult[2].equals(EXPECTED_MAPPINGS));
+  }
+
+
+  /**
+   * This test validates that a 'create index' request with an improperly
+   * formatted document schema as the payload will result in an
+   * appropriate error being returned from the endpoint.
+   */
+  @Test
+  public void createIndexWithMangledSchemaTest() {
+
+    String INDEX_NAME = "test-index";
+    int BAD_REQUEST_CODE = 400;
+
+    String invalidSchemaString = "this is definitely not json!";
+
+    Response result = target(TOP_URI + INDEX_NAME).request().put(Entity.json(invalidSchemaString), Response.class);
+
+    assertEquals("Invalid document schema should result in a 400 error",
+        BAD_REQUEST_CODE, result.getStatus());
+  }
+
+
+  /**
+   * This test validates the behaviour of the 'Delete Index' end point.
+   */
+  @Test
+  public void deleteIndexTest() {
+
+    String INDEX_NAME = "test-index";
+
+    // Send a request to the 'delete index' endpoint.
+    String result = target(TOP_URI + INDEX_NAME).request().delete(String.class);
+
+    // Validate that the expected parameters were passed to the document
+    // store DAO.
+    assertTrue("Unexpected index name '" + result + "' passed to doc store DAO",
+        result.equals(INDEX_NAME));
+  }
+
+
+  /**
+   * This test validates that attempting to delete an index which does not
+   * exist results in a 404 error.
+   */
+  @Test
+  public void deleteIndexDoesNotExistTest() {
+
+    int NOT_FOUND_CODE = 404;
+
+    // Send a request to the 'delete index' endpoint, specifying a
+    // non-existent index.
+    Response result = target(TOP_URI + StubEsController.DOES_NOT_EXIST_INDEX).request().delete(Response.class);
+
+    // Validate that a 404 error code is returned from the end point.
+    assertEquals("Deleting an index which does not exist should result in a 404 error",
+        NOT_FOUND_CODE, result.getStatus());
+  }
+}
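Note on the pattern above: these tests exercise a mapping from the service's
internal OperationResult onto a JAX-RS Response. A minimal sketch of that kind
of mapping (illustrative only; the commit's real logic lives in
IndexApi.responseFromOperationResult(), and the helper class below is
hypothetical):

    import javax.ws.rs.core.Response;

    class OperationResultMapperSketch {
      // 2xx results echo the result payload; anything else echoes the
      // failure cause, matching what the assertions above expect.
      static Response toResponse(int code, String result, String failureCause) {
        String entity = (code >= 200 && code < 300) ? result : failureCause;
        return Response.status(code).entity(entity).build();
      }
    }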
diff --git a/src/test/java/org/openecomp/sa/rest/SearchServiceApiHarness.java b/src/test/java/org/openecomp/sa/rest/SearchServiceApiHarness.java
new file mode 100644 (file)
index 0000000..1306740
--- /dev/null
@@ -0,0 +1,199 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+@Path("test/")
+public class SearchServiceApiHarness extends SearchServiceApi {
+
+
+  public static final String FAIL_AUTHENTICATION_TRIGGER = "FAIL AUTHENTICATION";
+
+  private boolean authenticationShouldSucceed = true;
+
+
+  /**
+   * Performs all one-time initialization required for the end point.
+   */
+  @Override
+  public void init() {
+
+    // Instantiate our Document Store DAO.
+    documentStore = new StubEsController();
+  }
+
+
+  @PUT
+  @Path("/indexes/{index}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processCreateIndex(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index) {
+
+    return super.processCreateIndex(requestBody, request, headers, index);
+  }
+
+  @DELETE
+  @Path("/indexes/{index}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processDeleteIndex(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index) {
+
+    return super.processDeleteIndex(requestBody, request, headers, index);
+  }
+
+  @GET
+  @Path("/indexes/{index}/documents/{id}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processGetDocument(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpServletResponse httpResponse,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index,
+                                     @PathParam("id") String id) {
+
+    return super.processGetDocument(requestBody, request, httpResponse, headers, index, id);
+  }
+
+  @POST
+  @Path("/indexes/{index}/documents")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processCreateDocWithoutId(String requestBody,
+                                            @Context HttpServletRequest request,
+                                            @Context HttpServletResponse httpResponse,
+                                            @Context HttpHeaders headers,
+                                            @PathParam("index") String index) {
+
+    return super.processCreateDocWithoutId(requestBody, request, httpResponse, headers, index);
+  }
+
+  @PUT
+  @Path("/indexes/{index}/documents/{id}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processUpsertDoc(String requestBody,
+                                   @Context HttpServletRequest request,
+                                   @Context HttpServletResponse httpResponse,
+                                   @Context HttpHeaders headers,
+                                   @PathParam("index") String index,
+                                   @PathParam("id") String id) {
+
+    return super.processUpsertDoc(requestBody, request, httpResponse, headers, index, id);
+  }
+
+  @DELETE
+  @Path("/indexes/{index}/documents/{id}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processDeleteDoc(String requestBody,
+                                   @Context HttpServletRequest request,
+                                   @Context HttpServletResponse httpResponse,
+                                   @Context HttpHeaders headers,
+                                   @PathParam("index") String index,
+                                   @PathParam("id") String id) {
+
+    return super.processDeleteDoc(requestBody, request, httpResponse, headers, index, id);
+  }
+
+  @GET
+  @Path("/indexes/{index}/query/{queryText}")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processInlineQuery(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index,
+                                     @PathParam("queryText") String queryText) {
+
+    return super.processInlineQuery(requestBody, request, headers, index, queryText);
+  }
+
+  @GET
+  @Path("/indexes/{index}/query")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processQueryWithGet(String requestBody,
+                                      @Context HttpServletRequest request,
+                                      @Context HttpHeaders headers,
+                                      @PathParam("index") String index) {
+
+    return super.processQueryWithGet(requestBody, request, headers, index);
+  }
+
+  @POST
+  @Path("/indexes/{index}/query")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processQuery(String requestBody,
+                               @Context HttpServletRequest request,
+                               @Context HttpHeaders headers,
+                               @PathParam("index") String index) {
+
+    return super.processQuery(requestBody, request, headers, index);
+  }
+
+  @POST
+  @Path("/bulk")
+  @Consumes({MediaType.APPLICATION_JSON})
+  @Override
+  public Response processBulkRequest(String requestBody,
+                                     @Context HttpServletRequest request,
+                                     @Context HttpHeaders headers,
+                                     @PathParam("index") String index) {
+
+    // If the request body contains the special trigger keyword, configure
+    // the harness to fail the authentication validation.
+    if (requestBody.contains(FAIL_AUTHENTICATION_TRIGGER)) {
+      authenticationShouldSucceed = false;
+    }
+
+    // Just pass the request up to the parent, since that is the code
+    // that we really want to test.
+    return super.processBulkRequest(requestBody, request, headers, index);
+  }
+
+  @Override
+  protected boolean validateRequest(HttpHeaders headers,
+                                    HttpServletRequest req,
+                                    ApiUtils.Action action,
+                                    String authPolicyFunctionName) throws Exception {
+
+    return authenticationShouldSucceed;
+  }
+}
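The harness toggles authentication from inside the request payload: any bulk
body containing FAIL_AUTHENTICATION_TRIGGER flips validateRequest() to return
false from that request onward. A hypothetical JerseyTest fragment showing how
a suite might drive that switch (the expected status code is an assumption,
not taken from this commit):

    // Assumed imports: org.junit.Test, javax.ws.rs.client.Entity,
    // javax.ws.rs.core.Response, static org.junit.Assert.assertEquals
    @Test
    public void bulkRequestWithFailedAuthenticationTest() {
      // Embedding the trigger keyword makes validateRequest() return false.
      Response result = target("test/bulk").request()
          .post(Entity.json(SearchServiceApiHarness.FAIL_AUTHENTICATION_TRIGGER),
              Response.class);

      // An unauthenticated caller should be rejected (403 assumed here).
      assertEquals(403, result.getStatus());
    }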
diff --git a/src/test/java/org/openecomp/sa/rest/StubEsController.java b/src/test/java/org/openecomp/sa/rest/StubEsController.java
new file mode 100644 (file)
index 0000000..f3e5619
--- /dev/null
@@ -0,0 +1,233 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.json.simple.JSONObject;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreDataEntity;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
+import org.openecomp.sa.searchdbabstraction.entity.*;
+import org.openecomp.sa.searchdbabstraction.util.DocumentSchemaUtil;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * This class implements a stubbed version of the document store DAO so
+ * that we can run unit tests without trying to connect to a real
+ * document store.
+ */
+public class StubEsController implements DocumentStoreInterface {
+
+  public static final String DOES_NOT_EXIST_INDEX = "index-does-not-exist";
+
+  private AnalysisConfiguration analysisConfig = null;
+
+  StubEsController() {
+    analysisConfig = new AnalysisConfiguration();
+    analysisConfig.init("src/test/resources/json/filter-config.json",
+        "src/test/resources/json/analysis-config.json");
+  }
+
+
+  @Override
+  public OperationResult createIndex(String index, DocumentSchema documentSchema) {
+
+    // Just return an OK result, with the parameters that we were passed
+    // bundled in the response string. This allows unit tests to validate
+    // that those parameters match what they expected to be passed.
+    OperationResult opResult = new OperationResult();
+    opResult.setResultCode(200);
+
+    opResult.setResult(index + "@" + analysisConfig.getEsIndexSettings() + "@"
+        + DocumentSchemaUtil.generateDocumentMappings(documentSchema));
+
+    return opResult;
+  }
+
+
+  @Override
+  public OperationResult deleteIndex(String indexName) throws DocumentStoreOperationException {
+
+    OperationResult opResult = new OperationResult();
+
+    if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+      opResult.setResultCode(404);
+    } else {
+      opResult.setResultCode(200);
+      opResult.setResult(indexName);
+    }
+
+    return opResult;
+  }
+
+  @Override
+  public DocumentOperationResult createDocument(String indexName,
+                                                DocumentStoreDataEntity document) throws DocumentStoreOperationException {
+    DocumentOperationResult opResult = buildSampleDocumentOperationResult();
+
+    if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+      opResult.setResultCode(404);
+    } else {
+      opResult.setResultCode(200);
+      String id = "dummy";
+      if (document.getId() != null) {
+        id = document.getId();
+      }
+      // Echo the parameters back so that unit tests can validate what was
+      // passed in, mirroring the other stub operations.
+      opResult.setResult(indexName + "@" + id);
+      opResult.setResultVersion("1");
+    }
+
+    return opResult;
+  }
+
+  @Override
+  public DocumentOperationResult updateDocument(String indexName,
+                                                DocumentStoreDataEntity document) throws DocumentStoreOperationException {
+    DocumentOperationResult opResult = buildSampleDocumentOperationResult();
+
+    if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+      opResult.setResultCode(404);
+    } else {
+      opResult.setResultCode(200);
+      String version = "1";
+      if (document.getVersion() != null) {
+        version = String.valueOf(Integer.parseInt(document.getVersion()) + 1);
+      }
+      opResult.setResultVersion(version);
+    }
+
+    return opResult;
+  }
+
+  @Override
+  public DocumentOperationResult deleteDocument(String indexName,
+                                                DocumentStoreDataEntity document) throws DocumentStoreOperationException {
+    DocumentOperationResult opResult = buildSampleDocumentOperationResult();
+
+
+    if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+      opResult.setResultCode(404);
+    } else {
+      if (opResult.getDocument() != null) {
+        opResult.getDocument().setEtag(null);
+        opResult.getDocument().setUrl(null);
+      }
+      opResult.setResultCode(200);
+      opResult.setResult(indexName + "@" + document.getId());
+    }
+
+    return opResult;
+  }
+
+  @Override
+  public DocumentOperationResult getDocument(String indexName,
+                                             DocumentStoreDataEntity document) throws DocumentStoreOperationException {
+    DocumentOperationResult opResult = buildSampleDocumentOperationResult();
+
+    if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+      opResult.setResultCode(404);
+    } else {
+      opResult.setResultCode(200);
+    }
+
+    return opResult;
+  }
+
+  @Override
+  public SearchOperationResult search(String indexName,
+                                      String queryText) throws DocumentStoreOperationException {
+
+    SearchOperationResult opResult = buildSampleSearchOperationResult();
+
+    if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+      opResult.setResultCode(404);
+    } else {
+      opResult.setResultCode(200);
+      opResult.setResult(indexName + "@" + queryText);
+    }
+
+    return opResult;
+  }
+
+  @Override
+  public SearchOperationResult searchWithPayload(String indexName,
+                                                 String query) throws DocumentStoreOperationException {
+    SearchOperationResult opResult = buildSampleSearchOperationResult();
+
+    if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+      opResult.setResultCode(404);
+    } else {
+      opResult.setResultCode(200);
+      opResult.setResult(indexName + "@" + query);
+    }
+
+    return opResult;
+  }
+
+  @Override
+  public OperationResult performBulkOperations(BulkRequest[] requests) throws DocumentStoreOperationException {
+
+    OperationResult opResult = new OperationResult();
+    opResult.setResultCode(200);
+
+    return opResult;
+  }
+
+  private DocumentOperationResult buildSampleDocumentOperationResult() {
+    DocumentOperationResult result = new DocumentOperationResult();
+    Document doc = new Document();
+    doc.setEtag("etag1");
+
+    doc.setContent(new JSONObject());
+    result.setDocument(doc);
+    return result;
+  }
+
+  private SearchOperationResult buildSampleSearchOperationResult() {
+    SearchOperationResult result = new SearchOperationResult();
+
+    SearchHits searchHits = new SearchHits();
+    SearchHit[] searchHitArray = new SearchHit[1];
+    SearchHit searchHit = new SearchHit();
+    Document doc = new Document();
+    doc.setEtag("etag1");
+    Map<String, Object> content = new HashMap<String, Object>();
+    content.put("key1", "value1");
+    doc.setContent(new JSONObject(content));
+    searchHit.setDocument(doc);
+    searchHitArray[0] = searchHit;
+
+    searchHits.setHits(searchHitArray);
+    searchHits.setTotalHits("1");
+    result.setSearchResult(searchHits);
+
+    return result;
+
+  }
+
+}
\ No newline at end of file
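updateDocument() above also models Elasticsearch's optimistic locking: each
successful update returns the incoming document version plus one. That rule in
isolation (a self-contained sketch, not part of the commit):

    class VersionIncrementSketch {
      // Mirror the stub's rule: a missing version defaults to "1",
      // otherwise the version is parsed and incremented.
      static String nextVersion(String current) {
        return (current == null) ? "1" : String.valueOf(Integer.parseInt(current) + 1);
      }

      public static void main(String[] args) {
        System.out.println(nextVersion(null)); // 1
        System.out.println(nextVersion("3"));  // 4
      }
    }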
diff --git a/src/test/java/org/openecomp/sa/rest/TestUtils.java b/src/test/java/org/openecomp/sa/rest/TestUtils.java
new file mode 100644 (file)
index 0000000..dc95d8f
--- /dev/null
@@ -0,0 +1,60 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+
+public class TestUtils {
+
+  /**
+   * This helper method reads the contents of a file into a
+   * simple string.
+   *
+   * @param aFile - The file to be imported.
+   *
+   * @return - The file contents expressed as a simple string.
+   *
+   * @throws IOException if the file cannot be read
+   */
+  public static String readFileToString(File aFile) throws IOException {
+
+    try (BufferedReader br = new BufferedReader(new FileReader(aFile))) {
+      StringBuilder sb = new StringBuilder();
+      String line = br.readLine();
+
+      while (line != null) {
+        sb.append(line);
+        line = br.readLine();
+      }
+
+      // Strip all whitespace so that callers can compare JSON content
+      // without being sensitive to formatting.
+      return sb.toString().replaceAll("\\s+", "");
+    }
+  }
+}
\ No newline at end of file
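Since Java 7, the same read-and-strip behaviour can be had from java.nio in a
couple of lines (an alternative sketch, not what this commit ships):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    class FileSlurpSketch {
      static String readAndStrip(String path) throws IOException {
        // Read the whole file, then drop every whitespace character so
        // formatting differences cannot break string comparisons.
        return new String(Files.readAllBytes(Paths.get(path)))
            .replaceAll("\\s+", "");
      }
    }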
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/AggregationResponseParsingTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/AggregationResponseParsingTest.java
new file mode 100644 (file)
index 0000000..b0ea69b
--- /dev/null
@@ -0,0 +1,105 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.junit.Test;
+import org.openecomp.sa.searchdbabstraction.entity.AggregationResult;
+import org.openecomp.sa.searchdbabstraction.entity.AggregationResults;
+import org.openecomp.sa.searchdbabstraction.util.AggregationParsingUtil;
+
+import static org.junit.Assert.fail;
+
+public class AggregationResponseParsingTest {
+
+  @Test
+  public void testParseAggregationResponse() {
+    JSONParser parser = new JSONParser();
+    JSONObject root;
+
+    String input =
+        "{\r\n  \"aggregations\": {\r\n    \"violations\": {\r\n      \"doc_count\": 2,\r\n      \"by_Timestamp\": {\r\n        \"doc_count_error_upper_bound\": 0,\r\n        \"sum_other_doc_count\": 0,\r\n        \"buckets\": [\r\n          {\r\n            \"key\": 7199992,\r\n            \"key_as_string\": \"Jan 1 1970 01:59:59\",\r\n            \"doc_count\": 2\r\n          }\r\n        ]\r\n      }\r\n    }\r\n  }\r\n}";
+
+    try {
+      root = (JSONObject) parser.parse(input);
+      JSONObject aggregations = (JSONObject) root.get("aggregations");
+      AggregationResult[] results = AggregationParsingUtil.parseAggregationResults(aggregations);
+      AggregationResults aggs = new AggregationResults();
+      ObjectMapper mapper = new ObjectMapper();
+      aggs.setAggregations(results);
+      System.out.println(mapper.setSerializationInclusion(Include.NON_NULL)
+          .writerWithDefaultPrettyPrinter().writeValueAsString(aggs));
+    } catch (Exception e) {
+      fail("Unexpected exception: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void testParseAggregationResponse2() {
+    JSONParser parser = new JSONParser();
+    JSONObject root;
+
+    String input =
+        "{\r\n  \"aggregations\": {\r\n    \"entityType\": {\r\n      \"doc_count_error_upper_bound\": 0,\r\n      \"sum_other_doc_count\": 0,\r\n      \"buckets\": [\r\n        {\r\n          \"key\": \"entity1\",\r\n          \"doc_count\": 5,\r\n          \"byVersion\": {\r\n            \"doc_count_error_upper_bound\": 0,\r\n            \"sum_other_doc_count\": 0,\r\n            \"buckets\": [\r\n              {\r\n                \"key\": \"0\",\r\n                \"doc_count\": 5\r\n              }\r\n            ]\r\n          }\r\n        }\r\n      ]\r\n    }\r\n  }\r\n}";
+
+    try {
+      root = (JSONObject) parser.parse(input);
+      JSONObject aggregations = (JSONObject) root.get("aggregations");
+      AggregationResult[] results = AggregationParsingUtil.parseAggregationResults(aggregations);
+      AggregationResults aggs = new AggregationResults();
+      ObjectMapper mapper = new ObjectMapper();
+      aggs.setAggregations(results);
+      System.out.println(mapper.setSerializationInclusion(Include.NON_NULL)
+          .writerWithDefaultPrettyPrinter().writeValueAsString(aggs));
+    } catch (Exception e) {
+      fail("Unexpected exception: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void testParseAggregationResponse3() {
+    JSONParser parser = new JSONParser();
+    JSONObject root;
+
+    String input =
+        "{\r\n  \"aggregations\": {\r\n    \"validateTimes\": {\r\n      \"buckets\": [\r\n        {\r\n          \"key\": \"Jan 10 2017 21:6:6-Jan 24 2017 13:43:5\",\r\n          \"from\": 1484082366000,\r\n          \"from_as_string\": \"Jan 10 2017 21:6:6\",\r\n          \"to\": 1485265385000,\r\n          \"to_as_string\": \"Jan 24 2017 13:43:5\",\r\n          \"doc_count\": 95\r\n        },\r\n        {\r\n          \"key\": \"Feb 3 2017 18:27:39-*\",\r\n          \"from\": 1486146459000,\r\n          \"from_as_string\": \"Feb 3 2017 18:27:39\",\r\n          \"doc_count\": 2\r\n        }\r\n      ]\r\n    }\r\n  }\r\n}";
+
+    try {
+      root = (JSONObject) parser.parse(input);
+      JSONObject aggregations = (JSONObject) root.get("aggregations");
+      AggregationResult[] results = AggregationParsingUtil.parseAggregationResults(aggregations);
+      AggregationResults aggs = new AggregationResults();
+      ObjectMapper mapper = new ObjectMapper();
+      aggs.setAggregations(results);
+      System.out.println(mapper.setSerializationInclusion(Include.NON_NULL)
+          .writerWithDefaultPrettyPrinter().writeValueAsString(aggs));
+    } catch (Exception e) {
+      fail("Unexpected exception: " + e.getMessage());
+    }
+  }
+}
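The tests above rely on json-simple's untyped object tree to peel off the
"aggregations" wrapper before delegating to AggregationParsingUtil. That
navigation pattern, shown in isolation:

    import org.json.simple.JSONObject;
    import org.json.simple.parser.JSONParser;

    public class JsonSimpleNavigationExample {
      public static void main(String[] args) throws Exception {
        String input = "{\"aggregations\": {\"violations\": {\"doc_count\": 2}}}";

        // json-simple returns plain Objects, so each level needs a cast.
        JSONObject root = (JSONObject) new JSONParser().parse(input);
        JSONObject aggregations = (JSONObject) root.get("aggregations");
        JSONObject violations = (JSONObject) aggregations.get("violations");

        System.out.println("doc_count = " + violations.get("doc_count")); // 2
      }
    }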
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpControllerTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpControllerTest.java
new file mode 100644 (file)
index 0000000..2439f48
--- /dev/null
@@ -0,0 +1,235 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.config.ElasticSearchConfig;
+import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
+
+import java.util.Properties;
+
+@Ignore("All tests in this classes require an Elasticsearch instance to run locally")
+public class ElasticSearchHttpControllerTest {
+
+  private static ElasticSearchHttpController elasticSearch;
+  private static AAIEntityTestObject testDocument;
+
+  private static final String indexMappings = "{\r\n    \"properties\": {\r\n        \"entityType\": {\r\n            \"type\": \"string\"\r\n        },\r\n        \"edgeTagQueryEntityFieldName\": {\r\n            \"type\": \"string\",\r\n            \"index\": \"no\"\r\n        },\r\n        \"edgeTagQueryEntityFieldValue\": {\r\n            \"type\": \"string\",\r\n            \"index\": \"no\"\r\n        },\r\n        \"searchTagIDs\" : {\r\n            \"type\" : \"string\"\r\n          },\r\n        \"searchTags\": {\r\n            \"type\": \"string\",\r\n            \"analyzer\": \"nGram_analyzer\",\r\n            \"search_analyzer\": \"whitespace_analyzer\"\r\n        }\r\n    }\r\n}";
+  private static final String indexSettings = "{\r\n    \"analysis\": {\r\n        \"filter\": {\r\n            \"nGram_filter\": {\r\n                \"type\": \"nGram\",\r\n                \"min_gram\": 1,\r\n                \"max_gram\": 50,\r\n                \"token_chars\": [\r\n                    \"letter\",\r\n                    \"digit\",\r\n                    \"punctuation\",\r\n                    \"symbol\"\r\n                ]\r\n            }\r\n        },\r\n        \"analyzer\": {\r\n            \"nGram_analyzer\": {\r\n                \"type\": \"custom\",\r\n                \"tokenizer\": \"whitespace\",\r\n                \"filter\": [\r\n                    \"lowercase\",\r\n                    \"asciifolding\",\r\n                    \"nGram_filter\"\r\n                ]\r\n            },\r\n            \"whitespace_analyzer\": {\r\n                \"type\": \"custom\",\r\n                \"tokenizer\": \"whitespace\",\r\n                \"filter\": [\r\n                    \"lowercase\",\r\n                    \"asciifolding\"\r\n                ]\r\n            }\r\n        }\r\n    }\r\n}";
+
+  @Before
+  public void setUp() throws Exception {
+    Properties properties = new Properties();
+    properties.put(ElasticSearchConfig.ES_IP_ADDRESS, "127.0.0.1");
+    properties.put(ElasticSearchConfig.ES_HTTP_PORT, "9200");
+    ElasticSearchConfig config = new ElasticSearchConfig(properties);
+    elasticSearch = new ElasticSearchHttpController(config);
+
+    testDocument = new AAIEntityTestObject();
+    testDocument.setId("test123");
+    testDocument.setEntityType("service-instance");
+    testDocument.setEdgeTagQueryEntityFieldName("service-instance.service-instance-id");
+    testDocument.setEdgeTagQueryEntityFieldValue("123456");
+    testDocument.setSearchTagIDs("0");
+    testDocument.setSearchTags("service-instance-id");
+
+  }
+
+  @Test
+  public void testCreateTable() throws Exception {
+    OperationResult result = elasticSearch.createTable("test", "aai-entities", indexSettings, indexMappings);
+    System.out.println(result);
+  }
+
+  @Test
+  public void testCreateDocument() throws Exception {
+    OperationResult result = elasticSearch.createDocument("test", testDocument);
+    System.out.println(result);
+
+    DocumentStoreDataEntityImpl ds = new DocumentStoreDataEntityImpl();
+    ds.setId(testDocument.getId());
+
+    result = elasticSearch.getDocument("test", ds);
+    System.out.println(result);
+  }
+
+  @Test
+  public void testUpdateDocument() throws Exception {
+    testDocument.setEdgeTagQueryEntityFieldValue("567890");
+
+    OperationResult result = elasticSearch.updateDocument("test", testDocument);
+    System.out.println(result);
+
+    result = elasticSearch.getDocument("test", testDocument);
+    System.out.println(result);
+  }
+
+  @Test
+  public void testDeleteDocument() throws Exception {
+    OperationResult result = elasticSearch.deleteDocument("test", testDocument);
+    System.out.println(result);
+
+    result = elasticSearch.getDocument("test", testDocument);
+    System.out.println(result);
+  }
+
+  @Test
+  public void testBulkCreateDocuments() throws Exception {
+    for (int i = 0; i < 10; i++) {
+      AAIEntityTestObject doc = new AAIEntityTestObject();
+      doc.setId("test-" + i);
+      doc.setEntityType("service-instance");
+      doc.setEdgeTagQueryEntityFieldName("service-instance.service-instance-id");
+      doc.setEdgeTagQueryEntityFieldValue("123456" + i);
+      doc.setSearchTagIDs("" + i);
+      doc.setSearchTags("service-instance-id");
+
+      OperationResult result = elasticSearch.createDocument("test", doc);
+      System.out.println(result);
+    }
+  }
+
+  @Test
+  public void searchByEntityType() throws Exception {
+    OperationResult result = elasticSearch.search("test", "q=instance");
+    System.out.println(result);
+  }
+
+  @Test
+  public void searchByTagIDs() throws Exception {
+    OperationResult result = elasticSearch.search("test", "q=9");
+    System.out.println(result);
+  }
+
+  @Test
+  public void searchByTags() throws Exception {
+    OperationResult result = elasticSearch.search("test", "q=service");
+    System.out.println(result);
+  }
+
+  @Test
+  public void testCreateDocumentWithoutId() throws Exception {
+    AAIEntityTestObject doc = new AAIEntityTestObject();
+    doc.setEntityType("service-instance");
+    doc.setEdgeTagQueryEntityFieldName("service-instance.service-instance-id");
+    doc.setEdgeTagQueryEntityFieldValue("1111111");
+    doc.setSearchTagIDs("321");
+    doc.setSearchTags("service-instance-id");
+
+    OperationResult result = elasticSearch.createDocument("test", doc);
+    System.out.println(result);
+  }
+
+  @Test
+  public void testDeleteIndex() throws Exception {
+    OperationResult result = elasticSearch.deleteIndex("test");
+    System.out.println(result);
+  }
+
+  class AAIEntityTestObject implements DocumentStoreDataEntity {
+    private String id;
+    private String entityType;
+    private String edgeTagQueryEntityFieldName;
+    private String edgeTagQueryEntityFieldValue;
+    private String searchTagIDs;
+    private String searchTags;
+
+    public void setId(String id) {
+      this.id = id;
+    }
+
+    @Override
+    public String getId() {
+      return this.id;
+    }
+
+    public String getEntityType() {
+      return entityType;
+    }
+
+    public void setEntityType(String entityType) {
+      this.entityType = entityType;
+    }
+
+    public String getEdgeTagQueryEntityFieldName() {
+      return edgeTagQueryEntityFieldName;
+    }
+
+    public void setEdgeTagQueryEntityFieldName(String edgeTagQueryEntityFieldName) {
+      this.edgeTagQueryEntityFieldName = edgeTagQueryEntityFieldName;
+    }
+
+    public String getEdgeTagQueryEntityFieldValue() {
+      return edgeTagQueryEntityFieldValue;
+    }
+
+    public void setEdgeTagQueryEntityFieldValue(String edgeTagQueryEntityFieldValue) {
+      this.edgeTagQueryEntityFieldValue = edgeTagQueryEntityFieldValue;
+    }
+
+    public String getSearchTagIDs() {
+      return searchTagIDs;
+    }
+
+    public void setSearchTagIDs(String searchTagIDs) {
+      this.searchTagIDs = searchTagIDs;
+    }
+
+    public String getSearchTags() {
+      return searchTags;
+    }
+
+    public void setSearchTags(String searchTags) {
+      this.searchTags = searchTags;
+    }
+
+    @Override
+    public String getVersion() {
+      return "1";
+    }
+
+    @Override
+    public String getContentInJson() {
+      try {
+        return new JSONObject()
+            .put("entityType", entityType)
+            .put("edgeTagQueryEntityFieldName", edgeTagQueryEntityFieldName)
+            .put("edgeTagQueryEntityFieldValue", edgeTagQueryEntityFieldValue)
+            .put("searchTagIDs", searchTagIDs)
+            .put("searchTags", searchTags).toString();
+      } catch (JSONException e) {
+        // Not expected for the well-formed values used in this test object.
+        e.printStackTrace();
+        return null;
+      }
+    }
+
+  }
+
+}
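These integration tests expect Elasticsearch on 127.0.0.1:9200, which is why
the class is @Ignore'd by default. Pointing them at another instance only
means changing the two properties used in setUp(); any further configuration
keys would be assumptions beyond this commit:

    // Sketch of an alternative setUp() body, wired against a non-local host.
    Properties properties = new Properties();
    properties.put(ElasticSearchConfig.ES_IP_ADDRESS, "10.0.0.5"); // assumed example host
    properties.put(ElasticSearchConfig.ES_HTTP_PORT, "9200");
    ElasticSearchConfig config = new ElasticSearchConfig(properties);
    elasticSearch = new ElasticSearchHttpController(config);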
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatementTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatementTest.java
new file mode 100644 (file)
index 0000000..e73b882
--- /dev/null
@@ -0,0 +1,147 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class AggregationStatementTest {
+
+  private static ObjectMapper mapper = new ObjectMapper();
+
+  @Test
+  public void testGroupBy() {
+    String input = "{\r\n    \"group-by\": {\r\n      \"field\": \"entityType\"\r\n    }\r\n  }";
+
+    String expected = "{\"terms\": {\"field\": \"entityType\"}}";
+
+    AggregationStatement actual;
+    try {
+      actual = mapper.readValue(input, AggregationStatement.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+
+  }
+
+  @Test
+  public void testDateRange() {
+    String input = "{\r\n  \"date-range\": {\r\n    \"field\": \"mydate\",\r\n    \"ranges\": [\r\n      {\r\n        \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n        \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n      }\r\n    ],\r\n    \"format\": \"MM-yyy\",\r\n    \"size\": \"5\"\r\n  }\r\n}";
+
+    String expected = "{\"date_range\": {\"field\": \"mydate\", \"format\": \"MM-yyy\", \"ranges\": [{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"}], \"size\": 5}}";
+
+    AggregationStatement actual;
+    try {
+      actual = mapper.readValue(input, AggregationStatement.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+
+  }
+
+  @Test
+  public void testDateHistogram() {
+    String input = "{\r\n  \"date-histogram\": {\r\n    \"field\": \"mydate\",\r\n    \"interval\": \"day\"\r\n  }\r\n}";
+
+    String expected = "{\"date_histogram\": {\"field\": \"mydate\", \"interval\": \"day\"}}";
+
+    AggregationStatement actual;
+    try {
+      actual = mapper.readValue(input, AggregationStatement.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+
+  }
+
+  @Test
+  public void testSubAggregation1() {
+    String input = "{\r\n  \"group-by\": {\r\n    \"field\": \"severity\"\r\n  },\r\n  \"sub-aggregations\": [\r\n    {\r\n      \"name\": \"byType\",\r\n      \"aggregation\": {\r\n        \"group-by\": {\r\n          \"field\": \"entityType\"\r\n        }\r\n      }\r\n    }\r\n  ]\r\n}";
+    String expected = "{\"terms\": {\"field\": \"severity\"}, \"aggs\": {\"byType\": {\"terms\": {\"field\": \"entityType\"}}}}";
+
+    AggregationStatement actual;
+    try {
+      actual = mapper.readValue(input, AggregationStatement.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+
+  }
+
+  @Test
+  public void testSubAggregation2() {
+    String input = "{\r\n  \"group-by\": {\r\n    \"field\": \"severity\"\r\n  },\r\n  \"sub-aggregations\": [\r\n    {\r\n      \"name\": \"byType\",\r\n      \"aggregation\": {\r\n        \"group-by\": {\r\n          \"field\": \"violationType\"\r\n        }\r\n      }\r\n    },\r\n    {\r\n      \"name\": \"byRule\",\r\n      \"aggregation\": {\r\n        \"group-by\": {\r\n          \"field\": \"validationRule\"\r\n        }\r\n      }\r\n    }\r\n  ]\r\n}";
+    String expected = "{\"terms\": {\"field\": \"severity\"}, \"aggs\": {\"byType\": {\"terms\": {\"field\": \"violationType\"}},\"byRule\": {\"terms\": {\"field\": \"validationRule\"}}}}";
+
+    AggregationStatement actual;
+    try {
+      actual = mapper.readValue(input, AggregationStatement.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+
+  }
+
+
+  @Test
+  public void testNestedAggregation1() {
+    String input = "{\r\n  \"nested\": [{\r\n    \"name\": \"by_severity\",\r\n    \"aggregation\": {\r\n      \"group-by\": {\r\n        \"field\": \"violations.severity\"\r\n      }\r\n    }\r\n  }]\r\n}";
+    String expected = "{\"nested\": {\"path\": \"violations\"}, \"aggs\": {\"by_severity\": {\"terms\": {\"field\": \"violations.severity\"}}}}";
+
+    AggregationStatement actual;
+    try {
+      actual = mapper.readValue(input, AggregationStatement.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+
+  }
+
+  @Test
+  public void testNestedAggregation2() {
+    String input = "{\r\n  \"nested\": [\r\n    {\r\n      \"name\": \"by_severity\",\r\n      \"aggregation\": {\r\n        \"group-by\": {\r\n          \"field\": \"violations.severity\"\r\n        }\r\n      }\r\n    },\r\n    {\r\n      \"name\": \"by_type\",\r\n      \"aggregation\": {\r\n        \"group-by\": {\r\n          \"field\": \"violations.violationType\"\r\n        }\r\n      }\r\n    }\r\n  ]\r\n}";
+    String expected = "{\"nested\": {\"path\": \"violations\"}, \"aggs\": {\"by_severity\": {\"terms\": {\"field\": \"violations.severity\"}},\"by_type\": {\"terms\": {\"field\": \"violations.violationType\"}}}}";
+
+    AggregationStatement actual;
+    try {
+      actual = mapper.readValue(input, AggregationStatement.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+
+  }
+
+
+}
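Every case in this class repeats the same deserialize-then-compare steps. A
small helper in that style would collapse the boilerplate (an illustrative
refactoring, not part of the commit):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import static org.junit.Assert.assertEquals;

    final class AggregationAssert {
      private static final ObjectMapper MAPPER = new ObjectMapper();

      // Deserialize the vendor-neutral input and compare its Elasticsearch
      // rendering against the expected DSL string.
      static void assertTranslatesTo(String input, String expected) throws Exception {
        assertEquals(expected,
            MAPPER.readValue(input, AggregationStatement.class).toElasticSearch());
      }
    }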
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationTest.java
new file mode 100644 (file)
index 0000000..8b77c68
--- /dev/null
@@ -0,0 +1,52 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class AggregationTest {
+  private static ObjectMapper mapper = new ObjectMapper();
+
+  @Test
+  public void test() {
+    String input = "{\r\n  \"name\": \"byDate\",\r\n  \"aggregation\": {\r\n    \"date-range\": {\r\n      \"field\": \"mydate\",\r\n      \"ranges\": [\r\n        {\r\n          \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n          \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n        }\r\n      ]\r\n    },\r\n    \"sub-aggregations\": [{\r\n        \"name\": \"byTerm\",\r\n        \"aggregation\": {\r\n          \"group-by\": {\r\n            \"field\": \"myterm\"\r\n          }\r\n        }\r\n      }]\r\n  }\r\n}";
+
+    String expected = "\"byDate\": {\"date_range\": {\"field\": \"mydate\", \"ranges\": [{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"}]}, \"aggs\": {\"byTerm\": {\"terms\": {\"field\": \"myterm\"}}}}";
+
+    Aggregation actual;
+    try {
+      actual = mapper.readValue(input, Aggregation.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+
+  }
+
+}
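An Aggregation is a client-chosen name wrapped around an AggregationStatement,
and the expected string above shows the composition rule: the quoted name is
prefixed onto the statement's own rendering. That rule in isolation (a sketch
implied by the assertion, not the commit's code):

    class NamedAggregationSketch {
      // "byDate" + {statement JSON} -> "\"byDate\": {statement JSON}"
      static String render(String name, String statementJson) {
        return "\"" + name + "\": " + statementJson;
      }

      public static void main(String[] args) {
        System.out.println(render("byDate", "{\"date_range\": {}}"));
      }
    }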
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregationTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregationTest.java
new file mode 100644 (file)
index 0000000..cb93644
--- /dev/null
@@ -0,0 +1,87 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class DateHistogramAggregationTest {
+  ObjectMapper mapper = new ObjectMapper();
+
+  @Test
+  public void testFullSet() {
+    String input =
+        "{\r\n  \"field\": \"mydate\",\r\n  \"interval\": \"day\",\r\n  \"time-zone\": \"-01:00\"\r\n}";
+
+    String expected =
+        "\"date_histogram\": {\"field\": \"mydate\", \"interval\": \"day\", \"time_zone\": \"-01:00\"}";
+
+    DateHistogramAggregation actual;
+    try {
+      actual = mapper.readValue(input, DateHistogramAggregation.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void test2() {
+    String input =
+        "{\r\n  \"field\": \"mydate\",\r\n  \"interval\": \"day\"\r\n}";
+
+    String expected =
+        "\"date_histogram\": {\"field\": \"mydate\", \"interval\": \"day\"}";
+
+    DateHistogramAggregation actual;
+    try {
+      actual = mapper.readValue(input, DateHistogramAggregation.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void test3() {
+    String input =
+        "{\r\n  \"field\": \"mydate\"\r\n}";
+
+    String expected =
+        "\"date_histogram\": {\"field\": \"mydate\"}";
+
+    DateHistogramAggregation actual;
+    try {
+      actual = mapper.readValue(input, DateHistogramAggregation.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
+
+}
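Note how the vendor-neutral JSON uses "time-zone" while the generated
Elasticsearch DSL uses "time_zone". Presumably the POJO bridges the two
spellings with a Jackson annotation, along these lines (a sketch under that
assumption; the commit's actual DateHistogramAggregation class lives elsewhere
in the change set):

    import com.fasterxml.jackson.annotation.JsonProperty;

    class DateHistogramSketch {
      @JsonProperty("field")
      private String field;

      @JsonProperty("interval")
      private String interval;

      // Jackson binds the hyphenated wire name onto a Java field.
      @JsonProperty("time-zone")
      private String timeZone;

      String toElasticSearch() {
        // Emit the Elasticsearch spelling, appending only the optional parts
        // that were supplied - matching the three test cases above.
        StringBuilder sb = new StringBuilder("\"date_histogram\": {\"field\": \"").append(field).append("\"");
        if (interval != null) {
          sb.append(", \"interval\": \"").append(interval).append("\"");
        }
        if (timeZone != null) {
          sb.append(", \"time_zone\": \"").append(timeZone).append("\"");
        }
        return sb.append("}").toString();
      }
    }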
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregationTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregationTest.java
new file mode 100644 (file)
index 0000000..cb08c47
--- /dev/null
@@ -0,0 +1,71 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class DateRangeAggregationTest {
+
+  private static ObjectMapper mapper = new ObjectMapper();
+
+
+  @Test
+  public void test() {
+
+    String input =
+        "{\r\n    \"field\": \"mydate\",\r\n    \"ranges\": [\r\n      {\r\n        \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n        \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n      },\r\n      {\r\n        \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n      },\r\n      {\r\n        \"from\": \"2016-12-19T00:00:00.738-05:00\"\r\n      }\r\n    ],\r\n    \"format\": \"MM-yyy\",\r\n    \"size\": \"5\"\r\n}";
+    String expected = "\"date_range\": {\"field\": \"mydate\", \"format\": \"MM-yyy\", \"ranges\": [{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"},{\"to\": \"2016-12-23T23:59:59.738-05:00\"},{\"from\": \"2016-12-19T00:00:00.738-05:00\"}], \"size\": 5}";
+
+    DateRangeAggregation actual;
+    try {
+      actual = mapper.readValue(input, DateRangeAggregation.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void testNoFormatNoSize() {
+
+    String input =
+        "{\r\n    \"field\": \"mydate\",\r\n    \"ranges\": [\r\n      {\r\n        \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n        \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n      },\r\n      {\r\n        \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n      },\r\n      {\r\n        \"from\": \"2016-12-19T00:00:00.738-05:00\"\r\n      }\r\n    ]\r\n}";
+    String expected = "\"date_range\": {\"field\": \"mydate\", \"ranges\": [{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"},{\"to\": \"2016-12-23T23:59:59.738-05:00\"},{\"from\": \"2016-12-19T00:00:00.738-05:00\"}]}";
+
+    DateRangeAggregation actual;
+    try {
+      actual = mapper.readValue(input, DateRangeAggregation.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
+
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeTest.java
new file mode 100644 (file)
index 0000000..59d5bdd
--- /dev/null
@@ -0,0 +1,78 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class DateRangeTest {
+  private static ObjectMapper mapper = new ObjectMapper();
+
+  @Test
+  public void testBoth() {
+    String input = "{\r\n  \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n  \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n}";
+    String expected = "{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"}";
+
+    DateRange actual;
+    try {
+      actual = mapper.readValue(input, DateRange.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void testFrom() {
+    String input = "{\"from\": \"2016-12-19T00:00:00.738-05:00\"}";
+    String expected = "{\"from\": \"2016-12-19T00:00:00.738-05:00\"}";
+
+    DateRange actual;
+    try {
+      actual = mapper.readValue(input, DateRange.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void testTo() {
+    String input = "{\r\n  \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n}";
+    String expected = "{\"to\": \"2016-12-23T23:59:59.738-05:00\"}";
+
+    DateRange actual;
+    try {
+      actual = mapper.readValue(input, DateRange.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
+
+}
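All three cases pass because DateRange emits only the bounds that are present.
The rule the assertions imply, in isolation (a sketch, not the commit's code):

    class DateRangeRenderSketch {
      // Render "from" and/or "to", separated by a comma only when both exist.
      static String render(String from, String to) {
        StringBuilder sb = new StringBuilder("{");
        if (from != null) {
          sb.append("\"from\": \"").append(from).append("\"");
        }
        if (to != null) {
          if (from != null) {
            sb.append(", ");
          }
          sb.append("\"to\": \"").append(to).append("\"");
        }
        return sb.append("}").toString();
      }
    }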
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/FilterTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/FilterTest.java
new file mode 100644 (file)
index 0000000..6c7e5d2
--- /dev/null
@@ -0,0 +1,58 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+public class FilterTest {
+
+  @Test
+  public void test() throws JsonParseException, JsonMappingException, IOException {
+
+    String json = "{ \"any\": [ "
+        + "{\"match\": {\"field\": \"searchTags\", \"value\": \"a\"}},"
+        + "{\"match\": {\"field\": \"searchTags\", \"value\": \"b\"}}"
+        + "],"
+        + "\"all\": ["
+        + "{\"parsed-query\": {\"field\": \"fieldname\", \"query-string\": \"string\"}}"
+        + "]"
+        + "}";
+
+    ObjectMapper mapper = new ObjectMapper();
+    Filter filter = mapper.readValue(json, Filter.class);
+
+    // Assert on the deserialized filter lists rather than just dumping the
+    // object to stdout.
+    assertEquals("Unexpected number of queries in the 'any' list",
+        2, filter.getAny().length);
+    assertEquals("Unexpected number of queries in the 'all' list",
+        1, filter.getAll().length);
+  }
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregationTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregationTest.java
new file mode 100644 (file)
index 0000000..a81de6e
--- /dev/null
@@ -0,0 +1,66 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class GroupByAggregationTest {
+  private static ObjectMapper mapper = new ObjectMapper();
+
+  @Test
+  public void test() {
+    String input = "{\"field\" : \"entityType\", \"size\": 20}\r\n";
+
+    String expected = "\"terms\": {\"field\": \"entityType\", \"size\": 20}";
+
+    GroupByAggregation actual;
+    try {
+      actual = mapper.readValue(input, GroupByAggregation.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void testNoSize() {
+    String input = "{\"field\" : \"entityType\"}\r\n";
+
+    String expected = "\"terms\": {\"field\": \"entityType\"}";
+
+    GroupByAggregation actual;
+    try {
+      actual = mapper.readValue(input, GroupByAggregation.class);
+      assertEquals(expected, actual.toElasticSearch());
+    } catch (Exception e) {
+      fail("Exception occurred: " + e.getMessage());
+    }
+  }
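+
+  /*
+   * For reference, the rendering behaviour these two tests pin down: the
+   * "size" clause is emitted only when one was supplied.  A hypothetical
+   * sketch of that logic (the production class may differ):
+   *
+   *   public String toElasticSearch() {
+   *     StringBuilder sb = new StringBuilder("\"terms\": {\"field\": \"")
+   *         .append(field).append("\"");
+   *     if (size != null) {
+   *       sb.append(", \"size\": ").append(size);
+   *     }
+   *     return sb.append("}").toString();
+   *   }
+   */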
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryTest.java
new file mode 100644 (file)
index 0000000..e754ce2
--- /dev/null
@@ -0,0 +1,356 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+
+public class QueryTest {
+
+  /**
+   * This test validates that we are able to deserialize JSON structures
+   * representing term queries into POJOs and that we can then
+   * render those POJOs as ElasticSearch syntax.
+   *
+   * @throws JsonParseException
+   * @throws JsonMappingException
+   * @throws IOException
+   */
+  @Test
+  public void termQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+    Integer intValue = 1;
+    String field = "searchTags";
+    String termQueryWithIntegerValueJson = "{\"field\": \"" + field + "\", \"value\": " + intValue + "}";
+    String termQueryWithIntegerValueExpectedES = "{\"term\": {\"" + field + "\" : " + intValue + "}}";
+
+    Double doubleValue = 5.7;
+    String termQueryWithDoubleValueJson = "{\"field\": \"" + field + "\", \"value\": " + doubleValue + "}";
+    String termQueryWithDoubleValueExpectedES = "{\"term\": {\"" + field + "\" : " + doubleValue + "}}";
+
+    String stringValue = "theValue";
+    String termQueryWithStringValueJson = "{\"field\": \"" + field + "\", \"value\": \"" + stringValue + "\"}";
+    String termQueryWithStringValueExpectedES = "{\"term\": {\"" + field + "\" : \"" + stringValue + "\"}}";
+
+    ObjectMapper mapper = new ObjectMapper();
+
+
+    // Validate that we can deserialize a term query where the supplied
+    // value is an Integer.
+    TermQuery integerTermQuery = mapper.readValue(termQueryWithIntegerValueJson, TermQuery.class);
+    assertTrue("Expected value to be of type Integer, but was type " + integerTermQuery.getValue().getClass().getName(),
+        integerTermQuery.getValue() instanceof Integer);
+    assertEquals(intValue, integerTermQuery.getValue());
+
+    assertTrue("ElasticSearch term query translation does not match the expected result",
+        termQueryWithIntegerValueExpectedES.equals(integerTermQuery.toElasticSearch()));
+
+    // Validate that we can deserialize a term query where the supplied
+    // value is a Double.
+    TermQuery doubleTermQuery = mapper.readValue(termQueryWithDoubleValueJson, TermQuery.class);
+    assertTrue("Expected value to be of type Double, but was type " + doubleTermQuery.getValue().getClass().getName(),
+        doubleTermQuery.getValue() instanceof Double);
+    assertEquals(doubleValue, doubleTermQuery.getValue());
+    assertTrue("ElasticSearch term query translation does not match the expected result",
+        termQueryWithDoubleValueExpectedES.equals(doubleTermQuery.toElasticSearch()));
+
+    // Validate that we can deserialize a term query where the supplied
+    // value is a String literal.
+    TermQuery stringTermQuery = mapper.readValue(termQueryWithStringValueJson, TermQuery.class);
+    assertTrue("Expected value to be of type String, but was type " + stringTermQuery.getValue().getClass().getName(),
+        stringTermQuery.getValue() instanceof String);
+    assertEquals(stringValue, stringTermQuery.getValue());
+    assertTrue("ElasticSearch term query translation does not match the expected result",
+        termQueryWithStringValueExpectedES.equals(stringTermQuery.toElasticSearch()));
+
+
+  }
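+
+  /*
+   * The assertions above pin down type-sensitive rendering: string values
+   * are quoted in the generated ES syntax while numeric values are not.  A
+   * hypothetical sketch of such logic (the production TermQuery may
+   * implement this differently):
+   *
+   *   public String toElasticSearch() {
+   *     String renderedValue = (value instanceof String)
+   *         ? "\"" + value + "\"" : value.toString();
+   *     return "{\"term\": {\"" + field + "\" : " + renderedValue + "}}";
+   *   }
+   */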
+
+
+  /**
+   * This test validates that we are able to deserialize JSON structures
+   * representing parsed queries into POJOs and that we can then
+   * render those POJOs as ElasticSearch syntax.
+   *
+   * @throws JsonParseException
+   * @throws JsonMappingException
+   * @throws IOException
+   */
+  @Test
+  public void parsedQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+    String field = "fieldname";
+    String queryString = "The query string";
+
+    String queryJson = "{\"field\": \"" + field + "\", \"query-string\": \"" + queryString + "\"}";
+    String queryExpectedES = "{\"query_string\": {\"default_field\": \"" + field + "\", \"query\": \"" + queryString + "\"}}";
+
+    ObjectMapper mapper = new ObjectMapper();
+    ParsedQuery pq = mapper.readValue(queryJson, ParsedQuery.class);
+
+    assertTrue("Unexpected marshalled value for 'field' - expected: " + field + " actual: " + pq.getField(),
+        field.equals(pq.getField()));
+    assertTrue("Unexpected marshalled value for 'query-string' - expected: " + queryString + " actual: " + pq.getQueryString(),
+        queryString.equals(pq.getQueryString()));
+    assertTrue("Unexpected ElasticSearch syntax.  Expected: " + queryExpectedES + " Actual: " + pq.toElasticSearch(),
+        queryExpectedES.equals(pq.toElasticSearch()));
+  }
+
+
+  /**
+   * This test validates that a ranged query cannot be parsed with values
+   * for both the 'gte' and 'gt' fields or the 'lte' and 'lt' fields, and
+   * that we do not allow mixing of numeric and date types in the same
+   * query.
+   *
+   * @throws JsonParseException
+   * @throws IOException
+   */
+  @Test
+  public void rangeQueryConflictingBoundsTest() throws JsonParseException, IOException {
+
+    String invalidGTAndGTE = "{ \"field\": \"timestamp\", \"gte\": \"2016-10-06T00:00:00.558+03:00\", \"gt\": \"2016-10-06T23:59:59.558+03:00\"}";
+    String invalidLTAndLTE = "{ \"field\": \"timestamp\", \"lte\": \"2016-10-06T00:00:00.558+03:00\", \"lt\": \"2016-10-06T23:59:59.558+03:00\"}";
+    String invalidTypes = "{ \"field\": \"timestamp\", \"lte\": 5, \"gte\": \"2016-10-06T23:59:59.558+03:00\"}";
+
+    ObjectMapper mapper = new ObjectMapper();
+
+    // Attempt to parse a query where we are setting values for both the
+    // 'greater than' and 'greater than and equal to' operators.
+    boolean gotExpectedException = false;
+    try {
+      mapper.readValue(invalidGTAndGTE, RangeQuery.class);
+    } catch (JsonMappingException e) {
+      gotExpectedException = true;
+    }
+    assertTrue("Attempting to set both a 'gt' and 'gte' value on the same query should not have been allowed",
+        gotExpectedException);
+
+    // Attempt to parse a query where we are setting values for both the
+    // 'less than' and 'less than and equal to' operators.
+    gotExpectedException = false;
+    try {
+      mapper.readValue(invalidLTAndLTE, RangeQuery.class);
+    } catch (JsonMappingException e) {
+      gotExpectedException = true;
+    }
+    assertTrue("Attempting to set both a 'lt' and 'lte' value on the same query should not have been allowed",
+        gotExpectedException);
+
+    // Attempt to parse a query where we are mixing numeric and date values
+    // in the same query.
+    gotExpectedException = false;
+    try {
+      mapper.readValue(invalidTypes, RangeQuery.class);
+    } catch (JsonMappingException e) {
+      gotExpectedException = true;
+    }
+    assertTrue("Attempting to mix numeric and date values in the same query should not have been allowed",
+        gotExpectedException);
+
+
+  }
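+
+  /*
+   * Note on the mechanics: with Jackson's default WRAP_EXCEPTIONS behaviour,
+   * exceptions thrown from setters during deserialization surface as
+   * JsonMappingException, so conflict checks of this kind can live directly
+   * in the POJO.  A hypothetical sketch (the production RangeQuery may
+   * implement this differently):
+   *
+   *   public void setGt(Object gt) {
+   *     if (gte != null) {
+   *       throw new IllegalArgumentException(
+   *           "Cannot set both 'gt' and 'gte' on the same range query");
+   *     }
+   *     this.gt = gt;
+   *   }
+   */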
+
+
+  /**
+   * This test validates that date range queries can be deserialized into a
+   * Java POJO and rendered as ElasticSearch syntax.
+   *
+   * @throws JsonParseException
+   * @throws JsonMappingException
+   * @throws IOException
+   */
+  @Test
+  public void dateRangeQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+    String field = "timestamp";
+    String greaterThanDate = "2016-10-06T00:00:00.558+03:00";
+    String lessThanDate = "2016-10-06T23:59:59.558+03:00";
+
+    ObjectMapper mapper = new ObjectMapper();
+
+    // Generate a date range query using 'greater than or equal' and 'less
+    // than or equal' operations.
+    String dateRangeJson =
+        "{ \"field\": \"" + field + "\", \"gte\": \"" + greaterThanDate + "\", \"lte\": \"" + lessThanDate + "\"}";
+    String dateRangeExpectedES =
+        "{\"range\": {\"timestamp\": {\"gte\": \"2016-10-06T00:00:00.558+03:00\", \"lte\": \"2016-10-06T23:59:59.558+03:00\"}}}";
+
+    // Validate that the query is deserialized correctly into the POJO and that
+    // the generated ElasticSearch syntax looks as expected.
+    RangeQuery dateRangeQuery = mapper.readValue(dateRangeJson, RangeQuery.class);
+
+    assertTrue("Unexpected marshalled value for 'field'.  Expected: " + field + " Actual: " + dateRangeQuery.getField(),
+        field.equals(dateRangeQuery.getField()));
+    assertTrue("Unexpected type for 'gte' value.  Expected: String  Actual: " + dateRangeQuery.getGte().getClass().getName(),
+        dateRangeQuery.getGte() instanceof String);
+    assertTrue("Unexpected type for 'lte' value.  Expected: String  Actual: " + dateRangeQuery.getLte().getClass().getName(),
+        dateRangeQuery.getLte() instanceof String);
+    assertTrue("Unexpected marshalled value for 'gte'.  Expected: " + greaterThanDate + " Actual: " + dateRangeQuery.getGte(),
+        greaterThanDate.equals(dateRangeQuery.getGte()));
+    assertTrue("Unexpected marshalled value for 'lte'.  Expected: " + lessThanDate + " Actual: " + dateRangeQuery.getLte(),
+        lessThanDate.equals(dateRangeQuery.getLte()));
+    assertTrue("Unexpected ElasticSearch syntax.  Expected: " + dateRangeExpectedES + " Actual: " + dateRangeQuery.toElasticSearch(),
+        dateRangeExpectedES.equals(dateRangeQuery.toElasticSearch()));
+
+
+    // Generate a date range query using 'greater than' and 'less than or
+    // equal' operations.
+    dateRangeJson =
+        "{ \"field\": \"" + field + "\", \"gt\": \"" + greaterThanDate + "\", \"lte\": \"" + lessThanDate + "\"}";
+    dateRangeExpectedES =
+        "{\"range\": {\"timestamp\": {\"gt\": \"2016-10-06T00:00:00.558+03:00\", \"lte\": \"2016-10-06T23:59:59.558+03:00\"}}}";
+
+    // Validate that the query is deserialized correctly into the POJO and that
+    // the generated ElasticSearch syntax looks as expected.
+    dateRangeQuery = mapper.readValue(dateRangeJson, RangeQuery.class);
+
+    assertTrue("Unexpected marshalled value for 'field'.  Expected: " + field + " Actual: " + dateRangeQuery.getField(),
+        field.equals(dateRangeQuery.getField()));
+
+    assertTrue("Unexpected type for 'gt' value.  Expected: String  Actual: " + dateRangeQuery.getGt().getClass().getName(),
+        dateRangeQuery.getGt() instanceof String);
+
+    assertTrue("Unexpected type for 'lte' value.  Expected: String  Actual: " + dateRangeQuery.getLte().getClass().getName(),
+        dateRangeQuery.getLte() instanceof String);
+
+    assertTrue("Unexpected marshalled value for 'gt'.  Expected: " + greaterThanDate + " Actual: " + dateRangeQuery.getGt(),
+        greaterThanDate.equals(dateRangeQuery.getGt()));
+
+    assertTrue("Unexpected marshalled value for 'lte'.  Expected: " + lessThanDate + " Actual: " + dateRangeQuery.getLte(),
+        lessThanDate.equals(dateRangeQuery.getLte()));
+
+    assertTrue("Unexpected ElasticSearch syntax.  Expected: " + dateRangeExpectedES + " Actual: " + dateRangeQuery.toElasticSearch(),
+        dateRangeExpectedES.equals(dateRangeQuery.toElasticSearch()));
+
+
+    // Generate a date range query using only a 'greater than' operation.
+    dateRangeJson =
+        "{ \"field\": \"" + field + "\", \"gt\": \"" + greaterThanDate + "\"}";
+    dateRangeExpectedES =
+        "{\"range\": {\"timestamp\": {\"gt\": \"2016-10-06T00:00:00.558+03:00\"}}}";
+
+    // Validate that the query is deserialized correctly into the POJO and that
+    // the generated ElasticSearch syntax looks as expected.
+    dateRangeQuery = mapper.readValue(dateRangeJson, RangeQuery.class);
+
+    assertTrue("Unexpected marshalled value for 'field'.  Expected: " + field + " Actual: " + dateRangeQuery.getField(),
+        field.equals(dateRangeQuery.getField()));
+
+    assertTrue("Unexpected type for 'gt' value.  Expected: String  Actual: " + dateRangeQuery.getGt().getClass().getName(),
+        dateRangeQuery.getGt() instanceof String);
+
+    assertTrue("Unexpected marshalled value for 'gt'.  Expected: " + greaterThanDate + " Actual: " + dateRangeQuery.getGt(),
+        greaterThanDate.equals(dateRangeQuery.getGt()));
+
+    assertTrue("Unexpected ElasticSearch syntax.  Expected: " + dateRangeExpectedES + " Actual: " + dateRangeQuery.toElasticSearch(),
+        dateRangeExpectedES.equals(dateRangeQuery.toElasticSearch()));
+
+  }
+
+  /**
+   * This test validates that numeric range queries can be deserialized into
+   * a Java POJO and rendered as ElasticSearch syntax.
+   *
+   * @throws JsonParseException
+   * @throws JsonMappingException
+   * @throws IOException
+   */
+  @Test
+  public void numericRangeQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+    String field = "version";
+    Integer greaterThanInt = 5;
+    Integer lessThanInt = 100;
+
+    ObjectMapper mapper = new ObjectMapper();
+
+    // Generate a numeric range query using 'greater than or equal' and 'less
+    // than or equal' operations.
+    String numericRangeJson =
+        "{ \"field\": \"" + field + "\", \"gte\": " + greaterThanInt + ", \"lte\": " + lessThanInt + "}";
+    String numericRangeExpectedES =
+        "{\"range\": {\"" + field + "\": {\"gte\": " + greaterThanInt + ", \"lte\": " + lessThanInt + "}}}";
+
+    // Validate that the query is deserialized correctly into the POJO and that
+    // the generated ElasticSearch syntax looks as expected.
+    RangeQuery numericRangeQuery = mapper.readValue(numericRangeJson, RangeQuery.class);
+
+    assertTrue("Unexpected marshalled value for 'field'.  Expected: " + field + " Actual: " + numericRangeQuery.getField(),
+        field.equals(numericRangeQuery.getField()));
+    assertTrue("Unexpected type for 'gte' value.  Expected: Integer  Actual: " + numericRangeQuery.getGte().getClass().getName(),
+        numericRangeQuery.getGte() instanceof Integer);
+    assertTrue("Unexpected type for 'lte' value.  Expected: Integer  Actual: " + numericRangeQuery.getLte().getClass().getName(),
+        numericRangeQuery.getLte() instanceof Integer);
+    assertEquals("Unexpected marshalled value for 'gte'.  Expected: " + greaterThanInt + " Actual: " + numericRangeQuery.getGte(),
+        greaterThanInt, numericRangeQuery.getGte());
+    assertEquals("Unexpected marshalled value for 'lte'.  Expected: " + lessThanInt + " Actual: " + numericRangeQuery.getLte(),
+        lessThanInt, numericRangeQuery.getLte());
+    assertTrue("Unexpected ElasticSearch syntax.  Expected: " + numericRangeExpectedES + " Actual: " + numericRangeQuery.toElasticSearch(),
+        numericRangeExpectedES.equals(numericRangeQuery.toElasticSearch()));
+
+
+    Double greaterThanDouble = 5.0;
+    Double lessThanDouble = 100.0;
+
+    // Generate a numeric range query using 'greater than' and 'less than
+    // or equal' operations.
+    numericRangeJson =
+        "{ \"field\": \"" + field + "\", \"gt\": " + greaterThanDouble + ", \"lte\": " + lessThanDouble + "}";
+    numericRangeExpectedES =
+        "{\"range\": {\"" + field + "\": {\"gt\": " + greaterThanDouble + ", \"lte\": " + lessThanDouble + "}}}";
+
+    // Validate that the query is deserialized correctly into the POJO and that
+    // the generated ElasticSearch syntax looks as expected.
+    numericRangeQuery = mapper.readValue(numericRangeJson, RangeQuery.class);
+
+    assertTrue("Unexpected marshalled value for 'field'.  Expected: " + field + " Actual: " + numericRangeQuery.getField(),
+        field.equals(numericRangeQuery.getField()));
+
+    assertTrue("Unexpected type for 'gt' value.  Expected: Double  Actual: " + numericRangeQuery.getGt().getClass().getName(),
+        numericRangeQuery.getGt() instanceof Double);
+
+    assertTrue("Unexpected type for 'lte' value.  Expected: Double  Actual: " + numericRangeQuery.getLte().getClass().getName(),
+        numericRangeQuery.getLte() instanceof Double);
+
+    assertEquals("Unexpected marshalled value for 'gt'.  Expected: " + greaterThanDouble + " Actual: " + numericRangeQuery.getGt(),
+        greaterThanDouble, numericRangeQuery.getGt());
+
+    assertEquals("Unexpected marshalled value for 'lte'.  Expected: " + lessThanDouble + " Actual: " + numericRangeQuery.getLte(),
+        lessThanDouble, numericRangeQuery.getLte());
+
+    assertTrue("Unexpected ElasticSearch syntax.  Expected: " + numericRangeExpectedES + " Actual: " + numericRangeQuery.toElasticSearch(),
+        numericRangeExpectedES.equals(numericRangeQuery.toElasticSearch()));
+  }
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatementTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatementTest.java
new file mode 100644 (file)
index 0000000..b4c2fb8
--- /dev/null
@@ -0,0 +1,270 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+import org.openecomp.sa.rest.TestUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+import static org.junit.Assert.*;
+
+public class SearchStatementTest {
+
+  @Test
+  public void simpleQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+    String field = "searchTags";
+    String queryString = "aai3255";
+    String queryJson =
+        "{"
+            + "\"queries\": ["
+            + "{\"may\": {\"parsed-query\": {"
+            + "\"field\": \"" + field + "\","
+            + "\"query-string\": \"" + queryString + "\"}}}"
+            + "]"
+            + "}"
+            + "}";
+
+    String queryES =
+        "{"
+            + "\"version\": true,"
+            + "\"query\": {"
+            + "\"bool\": {"
+            + "\"must\": [], "
+            + "\"should\": ["
+            + "{\"query_string\": {\"default_field\": \"searchTags\", \"query\": \"aai3255\"}}"
+            + "],"
+            + "\"must_not\": []}"
+            + "}"
+            + "}";
+
+    // Deserialize our simple query JSON into a SearchStatement object.
+    ObjectMapper mapper = new ObjectMapper();
+    SearchStatement ss = mapper.readValue(queryJson, SearchStatement.class);
+
+    // We expect to have a search statement with one query.
+    assertEquals("Unexpected number of queries in marshalled result",
+        1, ss.getQueries().length);
+
+    // Validate that the query is of the expected type and contains the
+    // expected values.
+    QueryStatement query = ss.getQueries()[0].getQueryStatement();
+    assertNotNull("Expected marshalled statement to contain a 'parsed query'",
+        query.getParsedQuery());
+    assertTrue("Unexpected field name in marshalled query.  Expected: " + field + " Actual: " + query.getParsedQuery().getField(),
+        field.equals(query.getParsedQuery().getField()));
+    assertTrue("Unexpected query string in marshalled query.  Expected: " + queryString + " Actual: " + query.getParsedQuery().getQueryString(),
+        queryString.equals(query.getParsedQuery().getQueryString()));
+
+    // Validate that we are able to produce the expected ElasticSearch
+    // query syntax from the search statement.
+    assertTrue("Unexpected ElasticSearch syntax.  Expected: " + queryES + " Actual: " + ss.toElasticSearch(),
+        queryES.equals(ss.toElasticSearch()));
+  }
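+
+  /*
+   * As exercised above, the 'may' and 'must' wrappers map onto the 'should'
+   * and 'must' buckets of the generated ES bool query.  A hypothetical
+   * sketch of that dispatch (accessor names assumed, not confirmed):
+   *
+   *   for (Query q : queries) {
+   *     if (q.getMust() != null) {
+   *       mustQueries.add(q.getMust().toElasticSearch());
+   *     } else if (q.getMay() != null) {
+   *       shouldQueries.add(q.getMay().toElasticSearch());
+   *     }
+   *   }
+   */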
+
+
+  @Test
+  public void simpleSortedQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+    String field = "searchTags";
+    String queryString = "aai3255";
+    String queryJson =
+        "{"
+            + "\"queries\": ["
+            + "{\"may\": {\"parsed-query\": {"
+            + "\"field\": \"" + field + "\","
+            + "\"query-string\": \"" + queryString + "\"}}}"
+            + "],"
+            + "\"sort\": { \"field\": \"date\", \"order\": \"ascending\" }"
+            + "}";
+
+
+    String queryES =
+        "{"
+            + "\"version\": true,"
+            + "\"query\": {"
+            + "\"bool\": {"
+            + "\"must\": [], "
+            + "\"should\": ["
+            + "{\"query_string\": {\"default_field\": \"searchTags\", \"query\": \"aai3255\"}}"
+            + "],"
+            + "\"must_not\": []"
+            + "}"
+            + "}, "
+            + "\"sort\": { \"date\": { \"order\": \"asc\"}}"
+            + "}";
+
+    // Deserialize our simple query JSON into a SearchStatement object.
+    ObjectMapper mapper = new ObjectMapper();
+    SearchStatement ss = mapper.readValue(queryJson, SearchStatement.class);
+
+    // We expect to have a search statement with one query.
+    assertEquals("Unexpected number of queries in marshalled result",
+        1, ss.getQueries().length);
+
+    // Validate that the query is of the expected type and contains the
+    // expected values.
+    QueryStatement query = ss.getQueries()[0].getQueryStatement();
+    assertNotNull("Expected marshalled statement to contain a 'parsed query'",
+        query.getParsedQuery());
+    assertTrue("Unexpected field name in marshalled query.  Expected: " + field + " Actual: " + query.getParsedQuery().getField(),
+        field.equals(query.getParsedQuery().getField()));
+    assertTrue("Unexpected query string in marshalled query.  Expected: " + queryString + " Actual: " + query.getParsedQuery().getQueryString(),
+        queryString.equals(query.getParsedQuery().getQueryString()));
+    System.out.println("GDF: ES = " + ss.toElasticSearch());
+    // Validate that we are able to produce the expected ElasticSearch
+    // query syntax from the search statement.
+    assertTrue("Unexpected ElasticSearch syntax.  Expected: " + queryES + " Actual: " + ss.toElasticSearch(),
+        queryES.equals(ss.toElasticSearch()));
+    assertNull(ss.getAggregations());
+  }
+
+  @Test
+  public void filteredQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+    String filterField1 = "field1";
+    String filterField2 = "field2";
+    String filterField3 = "field3";
+    String filterValue1 = "a";
+    String filterValue2 = "b";
+    String filterValue3 = "string";
+    String filterJson = "{ \"any\": [ "
+        + "{\"match\": {\"field\": \"" + filterField1 + "\", \"value\": \"" + filterValue1 + "\"}},"
+        + "{\"match\": {\"field\": \"" + filterField2 + "\", \"value\": \"" + filterValue2 + "\"}}"
+        + "],"
+        + "\"all\": ["
+        + "{\"parsed-query\": {\"field\": \"" + filterField3 + "\", \"query-string\": \"" + filterValue3 + "\"}}"
+        + "]"
+        + "}";
+
+    String filterStanzaJson = "\"filter\": " + filterJson;
+
+    String queryStanzaJson = "\"queries\": [ "
+        + "{\"may\": {\"match\": {\"field\": \"searchTags\", \"value\": \"a\"}}},"
+        + "{\"may\": {\"match\": {\"field\": \"searchTags\", \"value\": \"b\"}}},"
+        + "{\"may\": {\"parsed-query\": {\"field\": \"fieldname\", \"query-string\": \"string\"}}}"
+        + "]";
+
+    String queryES =
+        "{"
+            + "\"version\": true,"
+            + "\"query\": {"
+            + "\"bool\": {"
+            + "\"must\": [], "
+            + "\"should\": ["
+            + "{\"term\": {\"searchTags\" : \"a\"}}, "
+            + "{\"term\": {\"searchTags\" : \"b\"}}, "
+            + "{\"query_string\": {\"default_field\": \"fieldname\", \"query\": \"string\"}}"
+            + "],"
+            + "\"must_not\": [], "
+            + "\"filter\": {"
+            + "\"bool\": {"
+            + "\"must\": ["
+            + "{\"query_string\": {\"default_field\": \"field3\", \"query\": \"string\"}}"
+            + "],"
+            + "\"must_not\": [],"
+            + "\"should\": ["
+            + "{\"term\": {\"field1\" : \"a\"}}, "
+            + "{\"term\": {\"field2\" : \"b\"}}"
+            + "],"
+            + "\"must_not\": []"
+            + "}"
+            + "}"
+            + "}"
+            + "}"
+            + "}";
+
+    StringBuilder sb = new StringBuilder();
+    sb.append("{");
+    sb.append(filterStanzaJson).append(", ");
+    sb.append(queryStanzaJson);
+    sb.append("}");
+
+    ObjectMapper mapper = new ObjectMapper();
+    SearchStatement ss = mapper.readValue(sb.toString(), SearchStatement.class);
+
+    assertEquals("Unexpected number of queries in the 'any' list for this statement's filter",
+        2, ss.getFilter().getAny().length);
+    assertEquals("Unexpected number of queries in the 'all' list for this statement's filter",
+        1, ss.getFilter().getAll().length);
+
+    assertTrue("Unexpected ElasticSearch syntax.  Expected: " + queryES + " Actual: " + ss.toElasticSearch(),
+        queryES.equals(ss.toElasticSearch()));
+
+    assertNull(ss.getAggregations());
+  }
+
+  @Test
+  public void aggregationTest() {
+    String input = "{\r\n  \"queries\": [\r\n    {\r\n      \"must\": {\r\n        \"match\": {\r\n          \"field\": \"searchTags\",\r\n          \"value\": \"a\"\r\n        }\r\n      }\r\n    }\r\n  ],\r\n  \"aggregations\": [\r\n    {\r\n      \"name\": \"byDate\",\r\n      \"aggregation\": {\r\n        \"date-range\": {\r\n          \"field\": \"mydate\",\r\n          \"ranges\": [\r\n            {\r\n              \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n              \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n            }\r\n          ]\r\n        },\r\n        \"sub-aggregations\": [\r\n          {\r\n            \"name\": \"byTerm\",\r\n            \"aggregation\": {\r\n              \"group-by\": {\r\n                \"field\": \"myterm\"\r\n              }\r\n            }\r\n          },\r\n          {\r\n            \"name\": \"byDate\",\r\n            \"aggregation\": {\r\n              \"date-histogram\": {\r\n                \"field\": \"myDate\",\r\n                \"interval\": \"myInterval\"\r\n              }\r\n            }\r\n          }\r\n        ]\r\n      }\r\n    },\r\n    {\r\n      \"name\": \"2nd\",\r\n      \"aggregation\": {\r\n        \"group-by\": {\r\n          \"field\": \"anotherTerm\"\r\n        }\r\n      }\r\n    }\r\n  ]\r\n}";
+
+    ObjectMapper mapper = new ObjectMapper();
+    try {
+      SearchStatement ss = mapper.readValue(input, SearchStatement.class);
+      Aggregation[] aggs = ss.getAggregations();
+      assertNotNull(aggs);
+      assertEquals("Unexpected number aggregations", 2, aggs.length);
+      assertEquals("byDate", aggs[0].getName());
+      assertNotNull(aggs[0].getStatement().getDateRange());
+      assertEquals("mydate", aggs[0].getStatement().getDateRange().getField());
+      assertNotNull(aggs[0].getStatement().getSubAggregations());
+      assertEquals(2, aggs[0].getStatement().getSubAggregations().length);
+      assertEquals("byTerm", aggs[0].getStatement().getSubAggregations()[0].getName());
+      assertEquals("byDate", aggs[0].getStatement().getSubAggregations()[1].getName());
+      assertNull(aggs[0].getStatement().getGroupBy());
+      assertEquals("2nd", aggs[1].getName());
+      assertNotNull(aggs[1].getStatement().getGroupBy());
+      assertEquals("anotherTerm", aggs[1].getStatement().getGroupBy().getField());
+      assertNull(aggs[1].getStatement().getDateRange());
+      assertNull(aggs[1].getStatement().getSubAggregations());
+
+    } catch (Exception e) {
+      fail("Encountered exception: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void resultSetRangeTest() throws IOException {
+
+    // Simple query with a result set subrange specified.
+    File queryWithSubrangeFile = new File("src/test/resources/json/queries/query-with-subrange.json");
+    String queryWithSubrangeStr = TestUtils.readFileToString(queryWithSubrangeFile);
+    String queryWithSubrangeExpectedESString =
+        "{\"version\": true,\"from\": 0, \"size\": 10, \"query\": {\"bool\": {\"must\": [{\"term\": {\"field1\" : \"Bob\"}}], \"should\": [],\"must_not\": []}}}";
+
+    ObjectMapper mapper = new ObjectMapper();
+    SearchStatement ss = mapper.readValue(queryWithSubrangeStr, SearchStatement.class);
+
+    assertEquals("Unexpected index for result set start", ss.getFrom(), (Integer) 0);
+    assertEquals("Unexpected value for result set size", ss.getSize(), (Integer) 10);
+    assertTrue("Unexpected elastic search query generated from search statement",
+        ss.toElasticSearch().equals(queryWithSubrangeExpectedESString));
+  }
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SortTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SortTest.java
new file mode 100644 (file)
index 0000000..0374865
--- /dev/null
@@ -0,0 +1,54 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+public class SortTest {
+
+  @Test
+  public void sortFieldTest() throws JsonParseException, JsonMappingException, IOException {
+
+    String field = "fieldname";
+    String order = "ascending";
+    String json = "{\"field\": \"" + field + "\", \"order\": \"" + order + "\"}";
+
+    ObjectMapper mapper = new ObjectMapper();
+    Sort sort = mapper.readValue(json, Sort.class);
+
+    assertTrue("Unexpected field name in marshalled object.  Expected: " + field + " Actual: " + sort.getField(),
+        field.equals(sort.getField()));
+    assertTrue("Unexpected order field in marshalled object.  Expected: " + order + " Actual: " + sort.getOrder(),
+        order.equals(sort.getOrder().toString()));
+
+  }
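+
+  /*
+   * The round trip above suggests the order field is an enum whose
+   * toString() yields the API-level value ("ascending"/"descending"), which
+   * SearchStatement then translates to the ES form ("asc"/"desc") when
+   * rendering the sort stanza.  A hypothetical sketch:
+   *
+   *   public enum SortDirection {
+   *     ascending, descending
+   *   }
+   */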
+}
diff --git a/src/test/resources/json/analysis-config.json b/src/test/resources/json/analysis-config.json
new file mode 100644 (file)
index 0000000..a622dcf
--- /dev/null
@@ -0,0 +1,21 @@
+[
+  {
+    "name": "nGram_analyzer",
+    "description": "NGram Analyzer",
+    "tokenizer": "whitespace",
+    "filters": [
+      "lowercase",
+      "asciifolding",
+      "nGram_filter"
+    ]
+  },
+  {
+    "name": "whitespace_analyzer",
+    "description": "Whitespace Analyzer",
+    "tokenizer": "whitespace",
+    "filters": [
+      "lowercase",
+      "asciifolding"
+    ]
+  }
+]
\ No newline at end of file
diff --git a/src/test/resources/json/bulk-ops-invalid.json b/src/test/resources/json/bulk-ops-invalid.json
new file mode 100644 (file)
index 0000000..4708498
--- /dev/null
@@ -0,0 +1,32 @@
+{
+  "operations": [
+    {
+      "operation": "create",
+      "meta-data": {
+        "index": "test-index"
+      },
+      "document": {
+        "field1": "value1",
+        "field2": "value2"
+      }
+    },
+    {
+      "operation": "dance!",
+      "meta-data": {
+        "index": "test-index",
+        "id": "2",
+        "version": "5"
+      },
+      "document": {
+        "field1": "new-value"
+      }
+    },
+    {
+      "operation": "delete",
+      "meta-data": {
+        "index": "test-index",
+        "id": "4"
+      }
+    }
+  ]
+}
\ No newline at end of file
diff --git a/src/test/resources/json/bulk-ops-valid.json b/src/test/resources/json/bulk-ops-valid.json
new file mode 100644 (file)
index 0000000..6e805cf
--- /dev/null
@@ -0,0 +1,31 @@
+[
+  {
+    "create": {
+      "metaData": {
+        "url": "/indexes/test-index/documents/"
+      },
+      "document": {
+        "field1": "value1",
+        "field2": "value2"
+      }
+    }
+  },
+  {
+    "update": {
+      "metaData": {
+        "url": "/indexes/test-index/documents/3",
+        "etag": "5"
+      },
+      "document": {
+        "field1": "new-value"
+      }
+    }
+  },
+  {
+    "delete": {
+      "metaData": {
+        "url": "/indexes/test-index/documents/7"
+      }
+    }
+  }
+]
diff --git a/src/test/resources/json/filter-config.json b/src/test/resources/json/filter-config.json
new file mode 100644 (file)
index 0000000..d1de1d7
--- /dev/null
@@ -0,0 +1,7 @@
+[
+  {
+    "name": "nGram_filter",
+    "description": "Custom NGram Filter.",
+    "configuration": " \"type\": \"nGram\", \"min_gram\": 1, \"max_gram\": 50, \"token_chars\": [ \"letter\", \"digit\", \"punctuation\", \"symbol\" ]"
+  }
+]
\ No newline at end of file
diff --git a/src/test/resources/json/nested-document.json b/src/test/resources/json/nested-document.json
new file mode 100644 (file)
index 0000000..8373a6d
--- /dev/null
@@ -0,0 +1,49 @@
+{
+  "fields": [
+    {
+      "name": "serverName",
+      "data-type": "string",
+      "searchable": true,
+      "search-analyzer": "whitespace",
+      "sub-fields": []
+    },
+    {
+      "name": "serverComplex",
+      "data-type": "string",
+      "search-analyzer": "whitespace",
+      "sub-fields": []
+    },
+    {
+      "name": "address",
+      "data-type": "nested",
+      "sub-fields": [
+        {
+          "name": "street",
+          "data-type": "string",
+          "sub-fields": []
+        },
+        {
+          "name": "city",
+          "data-type": "string",
+          "sub-fields": []
+        },
+        {
+          "name": "phone-numbers",
+          "data-type": "nested",
+          "sub-fields": [
+            {
+              "name": "home",
+              "data-type": "string",
+              "sub-fields": []
+            },
+            {
+              "name": "cell",
+              "data-type": "string",
+              "sub-fields": []
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
diff --git a/src/test/resources/json/queries/query-with-subrange.json b/src/test/resources/json/queries/query-with-subrange.json
new file mode 100644 (file)
index 0000000..36e5f15
--- /dev/null
@@ -0,0 +1,14 @@
+{
+  "results-start": 0,
+  "results-size": 10,
+  "queries": [
+    {
+      "must": {
+        "match": {
+          "field": "field1",
+          "value": "Bob"
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file
diff --git a/src/test/resources/json/queries/simple-parsed-query.json b/src/test/resources/json/queries/simple-parsed-query.json
new file mode 100644 (file)
index 0000000..50ce681
--- /dev/null
@@ -0,0 +1,10 @@
+{
+  "queries": [
+    {
+      "parsed-query": {
+        "field": "searchTags",
+        "query-string": "a"
+      }
+    }
+  ]
+}
\ No newline at end of file
diff --git a/src/test/resources/json/simpleDocument.json b/src/test/resources/json/simpleDocument.json
new file mode 100644 (file)
index 0000000..c21c574
--- /dev/null
@@ -0,0 +1,17 @@
+{
+  "fields": [
+    {
+      "name": "serverName",
+      "data-type": "string",
+      "searchable": true,
+      "search-analyzer": "whitespace",
+      "sub-fields": []
+    },
+    {
+      "name": "serverComplex",
+      "data-type": "string",
+      "search-analyzer": "whitespace",
+      "sub-fields": []
+    }
+  ]
+}
diff --git a/src/test/resources/json/tier-support-document.json b/src/test/resources/json/tier-support-document.json
new file mode 100644 (file)
index 0000000..c6a20db
--- /dev/null
@@ -0,0 +1,30 @@
+{
+  "document-type": "tier-support",
+  "document-id": "ts-1",
+  "fields": [
+    {
+      "name": "entityType",
+      "data-type": "string"
+    },
+    {
+      "name": "edgeTagQueryEntityFieldName",
+      "data-type": "string",
+      "index": false
+    },
+    {
+      "name": "edgeTagQueryEntityFieldValue",
+      "data-type": "string",
+      "index": false
+    },
+    {
+      "name": "searchTagIDs",
+      "data-type": "string"
+    },
+    {
+      "name": "searchTags",
+      "data-type": "string",
+      "index-analyzer": "nGram_analyzer",
+      "search-analyzer": "whitespace_analyzer"
+    }
+  ]
+}
\ No newline at end of file