- print(rule)
- url = '/distributor/distribution-targets'
- url2 = url + '/notfound'
- url3 = url2 + '/process-groups'
- assert(len(client.get(url).get_json()['distributionTargets']) == 0)
- assert(client.get(url2).status_code == 404)
- assert(client.put(url2, json={'name': 'notfound1', 'runtimeApiUrl': 'http://notfound/url'}).status_code == 404)
- assert(client.delete(url2).status_code == 404)
- assert(client.post(url3, json={'processGroupId': 'group1'}).status_code == 404)
- resp = client.post(url, json={'name': 'target1', 'runtimeApiUrl': 'http://target/url'})
- assert(resp.status_code == 200)
- print(resp.get_json())
- url2 = '/distributor/distribution-targets/' + resp.get_json()['id']
- url3 = url2 + '/process-groups'
- assert(len(client.get(url).get_json()['distributionTargets']) == 1)
- assert(client.get(url2).status_code == 200)
- assert(client.put(url2, json={'name': 'newtarget1', 'runtimeApiUrl': 'http://newtarget1/url'}).status_code == 200)
- assert(client.post(url3, json={'processGroupId': 'group1'}).status_code == 404)
- dummyflow['identifier'] = 'group1'
- assert(client.post(url3, json={'processGroupId': 'group1'}).status_code == 501)
- assert(client.delete(url2).status_code == 200)
- assert(client.delete(url2).status_code == 404)
+ print(rule)
+ url = "/distributor/distribution-targets"
+ url2 = url + "/notfound"
+ url3 = url2 + "/process-groups"
+ assert len(client.get(url).get_json()["distributionTargets"]) == 0
+ assert client.get(url2).status_code == 404
+ assert client.put(url2, json={"name": "notfound1", "runtimeApiUrl": "http://notfound/url"}).status_code == 404
+ assert client.delete(url2).status_code == 404
+ assert client.post(url3, json={"processGroupId": "group1"}).status_code == 404
+ resp = client.post(url, json={"name": "target1", "runtimeApiUrl": "http://target/url"})
+ assert resp.status_code == 200
+
+ # print(resp.get_json())
+ url2 = "/distributor/distribution-targets/" + resp.get_json()["id"]
+ url3 = url2 + "/process-groups"
+ assert len(client.get(url).get_json()["distributionTargets"]) == 1
+
+ assert client.get(url2).status_code == 200
+ assert client.put(url2, json={"name": "newtarget1", "runtimeApiUrl": "http://newtarget1/url"}).status_code == 200
+ assert client.put(url2, json={"name": "newtarget1", "runtimeApiUrl": "http://newtarget1/url"}).status_code == 200
+
+ assert client.post(url3, json={"processGroupId": "group1"}).status_code == 404
+ assert client.post(url3, json={"processGroupId": "group1"}).status_code == 404
+ dummyflow["identifier"] = "group1"
+ assert client.post(url3, json={"processGroupId": "group1"}).status_code == 501
+
+ assert client.delete(url2).status_code == 200
+ assert client.delete(url2).status_code == 404
+ url4 = "/does/not/exist"
+
+ # the following tests do not require an http client but do use requests lib
+
+ # test get_json() exception case
+ try:
+ utils.get_json(url4)
+ assert not "utils.get_json(url4) should throw errors.DistributorAPIError"
+ except errors.DistributorAPIError as e:
+ # expected result
+ pass
+
+ # _req.SHOWMATCHES = True
+ ret = onboarding_client.get_components_indexed(url, [("foo", "bar")])
+ assert ret == {("foo", "bar"): {"id": "groupComponentUrl"}}
+
+ #
+ try:
+ ret = onboarding_client.get_components_indexed(url, [("foo", "bar2")])
+ assert (
+ not "onboarding_client.get_components_indexed(...foo,bar2) should throw errors.DistributorAPIResourceNotFound"
+ )
+ except errors.DistributorAPIResourceNotFound as e:
+ # expected result
+ pass
+
+
+def test_data_access():
+ # various tests for data_access.py
+
+ saved_cache = copy.deepcopy(data_access.get_distribution_targets())
+ ret = data_access.get_distribution_target("ds")
+ assert ret == {}
+
+ # new transform_request()
+ req1 = {"name": "req1", "runtimeApiUrl": "rtau1", "nextDistributionTargetId": "ndti1"}
+ treq1 = data_access.transform_request(req1)
+ assert isdate(treq1["created"])
+ assert isdate(treq1["modified"])
+ assert isuuid(treq1["dt_id"])
+ assert treq1["processGroups"] == []
+
+ # new transform_request()
+ req2 = {"name": "req2", "runtimeApiUrl": "rtau2", "nextDistributionTargetId": "ndti1"}
+ treq2 = data_access.transform_request(req2)
+ assert isdate(treq2["created"])
+ assert isdate(treq2["modified"])
+ assert isuuid(treq2["dt_id"])
+ assert treq2["processGroups"] == []
+
+ # merge_request() should copy certain values from 2nd arg into 1st arg
+ ret = data_access.merge_request(treq1, treq2)
+ assert ret["name"] == treq2["name"]
+ assert ret["runtimeApiUrl"] == treq2["runtimeApiUrl"]
+ assert ret["description"] is None
+ assert ret["nextDistributionTargetId"] == treq2["nextDistributionTargetId"]
+
+ # add_distribution_target() adds to the cache
+ ret = data_access.add_distribution_target({"dt_id": "dt1", "val": "1", "processGroups": []})
+ assert data_access.get_distribution_target("dt1")["val"] == "1"
+
+ # update_distribution_target() updates an existing element of the cache
+ # If the element exists, it returns True
+ ret = data_access.update_distribution_target({"dt_id": "dt1", "val": "1b", "processGroups": []})
+ assert ret
+ assert data_access.get_distribution_target("dt1")["val"] == "1b"
+
+ # update_distribution_target() only updates existing elements of the cache
+ # If the element does not exist, it returns False
+ ret = data_access.update_distribution_target({"dt_id": "dt2", "val": "2", "processGroups": []})
+ assert not ret
+
+ # add_process_group adds an element to the processGroups array of the distribution target
+ # if the element exists, returns true, else false
+ assert data_access.add_process_group("dt1", {"processed": "p1"})
+ assert isdate(data_access.get_distribution_target("dt1")["processGroups"][0]["processed"])
+ assert not data_access.add_process_group("dt2", {"processed": "p1"})
+
+ # clean up the cache
+ # if the element exists, delete_distribution_target() returns True, else False
+ assert data_access.delete_distribution_target("dt1")
+ assert not data_access.delete_distribution_target("dt2")
+
+ assert data_access.get_distribution_targets() == saved_cache
+
+
+def test_transform():
+ # various tests for transform.py
+ flow1 = {"flowContents": {"processors": []}}
+ flow2 = {
+ "flowContents": {
+ "processors": [
+ {
+ "bundle": {"artifact": "artifact1", "version": "version1"},
+ }
+ ]
+ }
+ }
+ flow3 = {
+ "flowContents": {
+ "processors": [
+ {
+ "bundle": {"artifact": "artifact1", "version": "version1"},
+ },
+ {"bundle": {"artifact": "artifact2", "version": "version2"}},
+ ]
+ }
+ }
+ assert transform.extract_components_from_flow(flow1) == []
+ assert transform.extract_components_from_flow(flow2) == [("artifact1", "version1")]
+ assert transform.extract_components_from_flow(flow3) == [("artifact1", "version1"), ("artifact2", "version2")]