From 5d389fe63b86700b5041928f9af11e13a4be5d55 Mon Sep 17 00:00:00 2001
From: Aaron
Date: Wed, 24 Jun 2020 03:53:39 -0500
Subject: [PATCH 01/79] Linting

---
 .pylintrc       | 1 +
 tests/README.md | 4 ++++
 2 files changed, 5 insertions(+)
 create mode 100644 .pylintrc
 create mode 100644 tests/README.md

diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..2e0f5c5
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1 @@
+init-hook='import sys; from os import getcwd; sys.path.append(getcwd())'
\ No newline at end of file
diff --git a/tests/README.md b/tests/README.md
new file mode 100644
index 0000000..3717d26
--- /dev/null
+++ b/tests/README.md
@@ -0,0 +1,4 @@
+# How To Use
+
+When running, please use a mockserver defined in your user_variables.py
+The name of the mockserver should be "mockserver1". The tenant should be "tenant1"
\ No newline at end of file

From 8ad3c80c8215fe4a6923a2fe7ea9b0c8cbed52cc Mon Sep 17 00:00:00 2001
From: Aaron
Date: Wed, 24 Jun 2020 03:54:41 -0500
Subject: [PATCH 02/79] PAF-20 #First Working Test Case

---
 .../mock_hostgroup_tenantwide.json | 34 +++++++++++++
 tests/test_host_groups.py          | 31 +++++++++++
 tests/tooling_for_test.py          | 51 +++++++++++++++++++
 3 files changed, 116 insertions(+)
 create mode 100644 tests/mockserver_expectations/mock_hostgroup_tenantwide.json
 create mode 100644 tests/test_host_groups.py
 create mode 100644 tests/tooling_for_test.py

diff --git a/tests/mockserver_expectations/mock_hostgroup_tenantwide.json b/tests/mockserver_expectations/mock_hostgroup_tenantwide.json
new file mode 100644
index 0000000..867cd50
--- /dev/null
+++ b/tests/mockserver_expectations/mock_hostgroup_tenantwide.json
@@ -0,0 +1,34 @@
+[{
+    "entityId": "HOST-238441A17F95B305",
+    "displayName": "testserver",
+    "discoveredName": "testserver",
+    "firstSeenTimestamp": 1592513300463,
+    "lastSeenTimestamp": 1592980597441,
+    "tags": [],
+    "fromRelationships": {},
+    "toRelationships": {
+        "isProcessOf": [],
+        "runsOn": []
+    },
+    "osType": "LINUX",
+    "osArchitecture": "X86",
+    "osVersion": "Debian GNU/Linux 10 (buster) (kernel 4.19.0-9-amd64)",
+    "bitness": "64bit",
+    "cpuCores": 12,
+    "logicalCpuCores": 24,
+    "monitoringMode": "FULL_STACK",
+    "networkZoneId": "default",
+    "agentVersion": {
+        "major": 1,
+        "minor": 195,
+        "revision": 54,
+        "timestamp": "20200529-113801",
+        "sourceRevision": ""
+    },
+    "consumedHostUnits": 8.0,
+    "userLevel": "SUPERUSER",
+    "hostGroup": {
+        "meId": "HOST_GROUP-ABCDEFGH12345678",
+        "name": "HOST_GROUP_1"
+    }
+}]
\ No newline at end of file
diff --git a/tests/test_host_groups.py b/tests/test_host_groups.py
new file mode 100644
index 0000000..822b267
--- /dev/null
+++ b/tests/test_host_groups.py
@@ -0,0 +1,31 @@
+# import change_pythonpath
+import unittest
+import json
+import user_variables
+from tests import tooling_for_test
+from dynatrace.tenant import host_groups
+
+CLUSTER = user_variables.FULL_SET["mockserver1"]
+TENANT = "tenant1"
+URL_PATH = "/api/v1/entity/infrastructure/hosts"
+
+class TestHostGroupFunctions(unittest.TestCase):
+    def test_get_host_groups_tenantwide(self):
+        parameters = {
+            "relativeTime": ["day"],
+            "includeDetails": [ "true" ],
+            "Api-Token": [CLUSTER["api_token"][TENANT]],
+        }
+        mockserver_expectation_file = "tests/mockserver_expectations/mock_hostgroup_tenantwide.json"
+        tooling_for_test.create_mockserver_expectation(
+            CLUSTER, TENANT, URL_PATH, "GET", parameters, mockserver_expectation_file)
+        command_tested = host_groups.get_host_groups_tenantwide(CLUSTER, TENANT)
+
+        expected_result = {
+            'HOST_GROUP-ABCDEFGH12345678': 'HOST_GROUP_1'
+        }
+        self.assertEqual(command_tested, expected_result)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/tooling_for_test.py b/tests/tooling_for_test.py
new file mode 100644
index 0000000..37f6606
--- /dev/null
+++ b/tests/tooling_for_test.py
@@ -0,0 +1,51 @@
+"""Mockserver Expectation Setup"""
+import requests
+import json
+from dynatrace.requests.request_handler import generate_tenant_url, no_ssl_verification
+
+
+def create_mockserver_expectation(cluster, tenant, url_path, request_type, parameters, response_payload_file=None, response_code=200, mock_id=None):
+    expectation = {
+        "httpRequest": {
+            "queryStringParameters": {
+                "Api-Token": ["sample_api_token"]  # TODO Change this Hard Code
+            },
+        },
+        "httpResponse": {
+            "statusCode": 302
+        },
+        "times": {
+            "remainingTimes": 1,
+            "unlimited": False
+        },
+        "id": "OneOff",
+    }
+
+    # Parameters should always at least have Api-Token
+    expectation["httpRequest"]["queryStringParameters"] = parameters
+
+    if response_payload_file:
+        with open(response_payload_file) as f:
+            response_payload = json.load(f)
+        expectation["httpResponse"]["body"] = {
+            "type": "JSON",
+            "json": response_payload,
+        }
+    if response_code and isinstance(response_code, int):
+        expectation["httpResponse"]["statusCode"] = response_code
+    if mock_id:
+        expectation["id"] = mock_id
+
+    expectation_url = generate_tenant_url(
+        cluster, tenant) + "/mockserver/expectation"
+    with no_ssl_verification():
+        test_req = requests.request(
+            "PUT", expectation_url, json=expectation, verify=False)
+        if test_req.status_code > 300:
+            print(expectation, test_req.status_code, test_req.text, end="\n")
+            raise ValueError(test_req.status_code)
+
+
+def expected_payload(json_file):
+    with open(json_file) as f:
+        return json.load(f)
\ No newline at end of file

From 2312a95b92911642a9275970e3abea7d8815e7f9 Mon Sep 17 00:00:00 2001
From: Aaron
Date: Wed, 24 Jun 2020 04:11:56 -0500
Subject: [PATCH 03/79] Fixing Smells

---
 tests/test_host_groups.py | 2 +-
 tests/tooling_for_test.py | 7 +++----
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/tests/test_host_groups.py b/tests/test_host_groups.py
index 822b267..97631d5 100644
--- a/tests/test_host_groups.py
+++ b/tests/test_host_groups.py
@@ -1,4 +1,4 @@
-# import change_pythonpath
+"""Testing dynatrace.tenant.host_groups"""
 import unittest
 import json
 import user_variables
diff --git a/tests/tooling_for_test.py b/tests/tooling_for_test.py
index 37f6606..5126a41 100644
--- a/tests/tooling_for_test.py
+++ b/tests/tooling_for_test.py
@@ -4,7 +4,7 @@
 from dynatrace.requests.request_handler import generate_tenant_url, no_ssl_verification
 
 
-def create_mockserver_expectation(cluster, tenant, url_path, request_type, parameters, response_payload_file=None, response_code=200, mock_id=None):
+def create_mockserver_expectation(cluster, tenant, url_path, request_type, parameters, response_payload_file=None, mock_id=None):
     expectation = {
         "httpRequest": {
             "queryStringParameters": {
@@ -12,7 +12,7 @@ def create_mockserver_expectation(cluster, tenant, url_path, request_type, param
             },
         },
         "httpResponse": {
-            "statusCode": 302
+            "statusCode": 200
         },
         "times": {
             "remainingTimes": 1,
@@ -31,8 +31,7 @@ def create_mockserver_expectation(cluster, tenant, url_path, request_type, param
             "type": "JSON",
             "json": response_payload,
         }
-    if response_code and isinstance(response_code, int):
-        expectation["httpResponse"]["statusCode"] = response_code
+
     if mock_id:
         expectation["id"] = mock_id
 
From
4497d660bdf8620f87c2eb2dca8cd2c4fd7f8344 Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 24 Jun 2020 04:30:45 -0500 Subject: [PATCH 04/79] PAF-20 #Adding CircleCI for testing --- .circleci/config.yml | 29 ++++++++++++++++++++++++++++ .circleci/mockserver.py | 42 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 .circleci/config.yml create mode 100644 .circleci/mockserver.py diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 0000000..76173f6 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,29 @@ +version: 2 +jobs: + build: + working_directory: ~/opt/self-service/ + docker: + - image: circleci/python:3.8.3 + environment: + PIPENV_VENV_IN_PROJECT: true + - image: mockserver/mockserver + name: mockserver.mockserver + steps: + - checkout # check out source code to working directory + - run: + command: | + sudo chown -R circleci:circleci /usr/local/bin + sudo chown -R circleci:circleci /usr/local/lib/python3.8/site-packages + - run: + command: | + cp .circleci/user_variables.py framework/user_variables.py + pip install pipenv + pipenv install --dev + - run: + command: | + pipenv run python -m unittest tests/test_host_groups.py + - store_test_results: + path: test-results + - store_artifacts: + path: test-results + destination: tr1 diff --git a/.circleci/mockserver.py b/.circleci/mockserver.py new file mode 100644 index 0000000..acfb7fd --- /dev/null +++ b/.circleci/mockserver.py @@ -0,0 +1,42 @@ +"""User variables to use toolkit for Dynatrace""" +FULL_SET = { + "mockserver1": { + "url": "mockserver:1080", + "tenant": { + "tenant1": "mockserver", + }, + "api_token": { + "tenant1": "sample_api_token", + }, + "verify_ssl": False, + "is_managed": False, + "cluster_token": "Required for Cluster Operations in Managed" + } +} + +LOG_LEVEL="INFO" + +# ROLE TYPE KEYS +# access_env +# change_settings +# install_agent +# view_logs +# view_senstive +# change_sensitive + +USER_GROUPS = { + "role_types": { + "access_env": "accessenv", + "change_settings": "changesettings", + "view_logs": "logviewer", + "view_sensitive": "viewsensitive" + }, + "role_tenants": [ + "nonprod", + "prod" + ] +} + +USER_GROUP_TEMPLATE = "prefix_{USER_TYPE}_{TENANT}_{APP_NAME}_suffix" + +DEFAULT_TIMEZONE = "America/Chicago" From 01d5ad4039f302a119a06310e703985ed1bf84e7 Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 24 Jun 2020 04:31:20 -0500 Subject: [PATCH 05/79] Fixing Path --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 76173f6..0762040 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,7 +16,7 @@ jobs: sudo chown -R circleci:circleci /usr/local/lib/python3.8/site-packages - run: command: | - cp .circleci/user_variables.py framework/user_variables.py + cp .circleci/mockserver.py user_variables.py pip install pipenv pipenv install --dev - run: From d5f5956a6c43990aa1a79943789aaf383b89e00b Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 24 Jun 2020 04:34:04 -0500 Subject: [PATCH 06/79] Trying to trigger CircleCI --- tests/mockserver_expectations/mock_hostgroup_tenantwide.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/mockserver_expectations/mock_hostgroup_tenantwide.json b/tests/mockserver_expectations/mock_hostgroup_tenantwide.json index 867cd50..2b383c5 100644 --- a/tests/mockserver_expectations/mock_hostgroup_tenantwide.json +++ b/tests/mockserver_expectations/mock_hostgroup_tenantwide.json @@ -14,8 +14,8 @@ 
"osArchitecture": "X86", "osVersion": "Debian GNU/Linux 10 (buster) (kernel 4.19.0-9-amd64)", "bitness": "64bit", - "cpuCores": 12, - "logicalCpuCores": 24, + "cpuCores": 1, + "logicalCpuCores": 2, "monitoringMode": "FULL_STACK", "networkZoneId": "default", "agentVersion": { From 66bc897a028943648d214e01992069aaaf236416 Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 24 Jun 2020 04:36:03 -0500 Subject: [PATCH 07/79] fixing working directory --- .circleci/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0762040..11080ed 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,7 +1,6 @@ version: 2 jobs: build: - working_directory: ~/opt/self-service/ docker: - image: circleci/python:3.8.3 environment: From 0425e8b6f5f21582d4a52bf1dd212636a7e4297c Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 24 Jun 2020 04:37:24 -0500 Subject: [PATCH 08/79] Updating Pipfiles to 3.8 --- Pipfile | 2 +- Pipfile.lock | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Pipfile b/Pipfile index 334d058..b253a43 100644 --- a/Pipfile +++ b/Pipfile @@ -11,4 +11,4 @@ autopep8 = "*" requests = "*" [requires] -python_version = "3.4" +python_version = "3.8" diff --git a/Pipfile.lock b/Pipfile.lock index 194f912..3da6634 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "661f0c6028f892faee91814dacb44c678abaf7b6d1645af87357c1483119fa7f" + "sha256": "f1850de3b2311e799288920e10d9afb1837a02c65754e827532b4f40af27ab0d" }, "pipfile-spec": 6, "requires": { - "python_version": "3.4" + "python_version": "3.8" }, "sources": [ { @@ -18,10 +18,10 @@ "default": { "certifi": { "hashes": [ - "sha256:5ad7e9a056d25ffa5082862e36f119f7f7cec6457fa07ee2f8c339814b80c9b1", - "sha256:9cd41137dc19af6a5e03b630eefe7d1f458d964d406342dd3edf625839b944cc" + "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", + "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41" ], - "version": "==2020.4.5.2" + "version": "==2020.6.20" }, "chardet": { "hashes": [ @@ -39,11 +39,11 @@ }, "requests": { "hashes": [ - "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", - "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" + "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", + "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" ], "index": "pypi", - "version": "==2.23.0" + "version": "==2.24.0" }, "urllib3": { "hashes": [ From 37c70e3a43627791d1f077c61350f1664e6a5be1 Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 15 Jul 2020 08:21:44 -0500 Subject: [PATCH 09/79] PAF-20 #Adjusting Naming for MockServer Tests --- ...k_hostgroup_tenantwide.json => mock_hostgroup_1.json} | 0 tests/test_host_groups.py | 9 ++++++++- 2 files changed, 8 insertions(+), 1 deletion(-) rename tests/mockserver_expectations/{mock_hostgroup_tenantwide.json => mock_hostgroup_1.json} (100%) diff --git a/tests/mockserver_expectations/mock_hostgroup_tenantwide.json b/tests/mockserver_expectations/mock_hostgroup_1.json similarity index 100% rename from tests/mockserver_expectations/mock_hostgroup_tenantwide.json rename to tests/mockserver_expectations/mock_hostgroup_1.json diff --git a/tests/test_host_groups.py b/tests/test_host_groups.py index 97631d5..f2b4cba 100644 --- a/tests/test_host_groups.py +++ b/tests/test_host_groups.py @@ -16,7 +16,7 @@ def test_get_host_groups_tenantwide(self): "includeDetails": [ "true" ], 
"Api-Token": [CLUSTER["api_token"][TENANT]], } - mockserver_expectation_file = "tests/mockserver_expectations/mock_hostgroup_tenantwide.json" + mockserver_expectation_file = "tests/mockserver_expectations/mock_hostgroup_1.json" tooling_for_test.create_mockserver_expectation( CLUSTER, TENANT, URL_PATH, "GET", parameters, mockserver_expectation_file) command_tested = host_groups.get_host_groups_tenantwide(CLUSTER, TENANT) @@ -26,6 +26,13 @@ def test_get_host_groups_tenantwide(self): } self.assertEqual(command_tested, expected_result) + def test_get_host_groups_clusterwide(self): + parameters = { + "relativeTime": ["day"], + "includeDetails": [ "true" ], + "Api-Token": [CLUSTER["api_token"][TENANT]], + } + if __name__ == '__main__': unittest.main() From b8e7d5b17b0666ee92a80c37e76fc625082cc971 Mon Sep 17 00:00:00 2001 From: Aaron Date: Thu, 23 Jul 2020 22:11:47 -0500 Subject: [PATCH 10/79] Removing Unused Function --- tests/test_host_groups.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/test_host_groups.py b/tests/test_host_groups.py index f2b4cba..ca39ea9 100644 --- a/tests/test_host_groups.py +++ b/tests/test_host_groups.py @@ -26,13 +26,5 @@ def test_get_host_groups_tenantwide(self): } self.assertEqual(command_tested, expected_result) - def test_get_host_groups_clusterwide(self): - parameters = { - "relativeTime": ["day"], - "includeDetails": [ "true" ], - "Api-Token": [CLUSTER["api_token"][TENANT]], - } - - if __name__ == '__main__': unittest.main() From e20848716e4d90cc6ff1b5c7c99d4c3a67098b63 Mon Sep 17 00:00:00 2001 From: Radu Date: Fri, 24 Jul 2020 15:27:59 +0100 Subject: [PATCH 11/79] changes as per framework_changes.docx; merge from my codebase --- change_variables.py | 35 ++- dynatrace/cluster/config.py | 20 +- dynatrace/cluster/ssl.py | 43 +-- dynatrace/cluster/sso.py | 76 +++-- dynatrace/cluster/user_groups.py | 159 +++++----- dynatrace/cluster/users.py | 83 +++-- dynatrace/requests/request_handler.py | 420 ++++++++----------------- dynatrace/tenant/extensions.py | 34 ++ dynatrace/tenant/host_groups.py | 56 ++-- dynatrace/tenant/maintenance.py | 299 ++++++++++-------- dynatrace/tenant/management_zones.py | 180 ++++++----- dynatrace/tenant/request_attributes.py | 121 +++---- dynatrace/tenant/request_naming.py | 79 ++--- dynatrace/timeseries/metrics.py | 24 ++ dynatrace/timeseries/timeseries.py | 46 ++- dynatrace/topology/applications.py | 105 ++++--- dynatrace/topology/custom.py | 5 +- dynatrace/topology/hosts.py | 81 +++-- dynatrace/topology/process.py | 11 +- dynatrace/topology/process_groups.py | 38 ++- dynatrace/topology/services.py | 38 ++- dynatrace/topology/shared.py | 165 +++++----- 22 files changed, 1121 insertions(+), 997 deletions(-) create mode 100644 dynatrace/tenant/extensions.py create mode 100644 dynatrace/timeseries/metrics.py diff --git a/change_variables.py b/change_variables.py index ed12cdb..aba3923 100644 --- a/change_variables.py +++ b/change_variables.py @@ -3,23 +3,28 @@ import argparse import os + def replace_set(set_file): - """Replace Variable File""" - # Options are Darwin, Linux, Java and Windows. Java not supported - if "Windows" in system(): - os.system("copy variable_sets\\" + str(set_file) + ".py user_variables.py") - else: - os.system("cp variable_sets/" + str(set_file) + ".py user_variables.py") + """Replace Variable File""" + # Options are Darwin, Linux, Java and Windows. 
Java not supported + if "Windows" in system(): + os.system("copy variable_sets\\" + + str(set_file) + ".py user_variables.py") + else: + os.system("cp variable_sets/" + str(set_file) + + ".py user_variables.py") + def get_variable_set_file(variable_set_arg): - """Checks if the set file was provided via arg else prompt""" - if variable_set_arg: - return variable_set_arg - return input("Enter Set to Import: ") + """Checks if the set file was provided via arg else prompt""" + if variable_set_arg: + return variable_set_arg + return input("Enter Set to Import: ") + if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--set-file', '-s') - args = parser.parse_args() - set_file = get_variable_set_file(args.set_file) - replace_set(set_file) + parser = argparse.ArgumentParser() + parser.add_argument('--set-file', '-s') + args = parser.parse_args() + set_file = get_variable_set_file(args.set_file) + replace_set(set_file) diff --git a/dynatrace/cluster/config.py b/dynatrace/cluster/config.py index 54b9dd1..0d51613 100644 --- a/dynatrace/cluster/config.py +++ b/dynatrace/cluster/config.py @@ -1,13 +1,21 @@ import dynatrace.requests.request_handler as rh + def get_node_info(cluster): - response = rh.cluster_get(cluster,"cluster") - return response.json() + response = rh.make_api_call(cluster=cluster, + endpoint=rh.ClusterAPIs.CLUSTER) + return response.json() + def get_node_config(cluster): - response = rh.cluster_get(cluster,"cluster/configuration") - return response.json() + response = rh.make_api_call(cluster=cluster, + endpoint=rh.ClusterAPIs.CONFIG) + return response.json() + def set_node_config(cluster, json): - response = rh.cluster_post(cluster,"cluster/configuration", json=json) - return response.status_code \ No newline at end of file + response = rh.make_api_call(cluster=cluster, + endpoint=rh.ClusterAPIs.CONFIG, + method=rh.HTTP.POST, + json=json) + return response.status_code diff --git a/dynatrace/cluster/ssl.py b/dynatrace/cluster/ssl.py index d6fe335..b09e4e4 100644 --- a/dynatrace/cluster/ssl.py +++ b/dynatrace/cluster/ssl.py @@ -2,28 +2,31 @@ """Cluster SSL Certificate Operations""" import dynatrace.requests.request_handler as rh + def get_cert_details(cluster, entity_type, entity_id): - """Get SSL Certificate information for Server or Cluster ActiveGate""" - response = rh.cluster_get( - cluster, - "sslCertificate/" + str(entity_type) + "/" + str(entity_id) - ) - return response.json() + """Get SSL Certificate information for Server or Cluster ActiveGate""" + response = rh.make_api_call( + cluster=cluster, + endpoint=f"{rh.ClusterAPIs.SSL}/{entity_type}/{entity_id}" + ) + return response.json() + def get_cert_install_status(cluster, entity_id): - """Get SSL Storage Status for Cluster ActiveGate""" - response = rh.cluster_get( - cluster, - "sslCertificate/store/COLLECTOR/" + str(entity_id) - ) - return response.text + """Get SSL Storage Status for Cluster ActiveGate""" + response = rh.make_api_call( + cluster=cluster, + endpoint=f"{rh.ClusterAPIs.SSL_STORE}/COLLECTOR/{entity_id}" + ) + return response.text -def set_cert(cluster, entity_type, entity_id, ssl_json): - """Set SSL Storage Status for Server or Cluster ActiveGate""" - response = rh.cluster_post( - cluster, - "sslCertificate/store/" + str(entity_type) + "/" + str(entity_id), - json=ssl_json - ) - return response.json() +def set_cert(cluster, entity_type, entity_id, ssl_json): + """Set SSL Storage Status for Server or Cluster ActiveGate""" + response = rh.make_api_call( + cluster=cluster, + 
method=rh.HTTP.POST, + endpoint=f"{rh.ClusterAPIs.SSL_STORE}/{entity_type}/{entity_id}", + json=ssl_json + ) + return response.json() diff --git a/dynatrace/cluster/sso.py b/dynatrace/cluster/sso.py index 30e1e11..df73b0a 100644 --- a/dynatrace/cluster/sso.py +++ b/dynatrace/cluster/sso.py @@ -3,38 +3,48 @@ ENDPOINT = "sso/ssoProvider" -def disable_sso (cluster): - """Disable SSO Sign-in""" - disable_payload = { - "ssoProvider": "NONE", - "loginPage": "STANDARD", - "ssoEnabled": False, - "ssoGroupsEnabled": False, - "ssoLoginDisabled": True - } - response = rh.cluster_post(cluster, ENDPOINT, json=disable_payload) - return response.status_code + +def disable_sso(cluster): + """Disable SSO Sign-in""" + disable_payload = { + "ssoProvider": "NONE", + "loginPage": "STANDARD", + "ssoEnabled": False, + "ssoGroupsEnabled": False, + "ssoLoginDisabled": True + } + response = rh.make_api_call(cluster=cluster, + endpoint=ENDPOINT, + method=rh.HTTP.POST, + json=disable_payload) + return response.status_code + def enable_sso(cluster, disable_local=False, groups_enabled=False, is_openid=False): - """Turns on SSO that has already been configured""" - enable_payload = { - "ssoProvider":"SAML", - "loginPage":"STANDARD", - "ssoEnabled":True, - "ssoGroupsEnabled":False, - "ssoLoginDisabled":False - } - - if disable_local: - enable_payload['loginPage'] = "SSO" - if groups_enabled: - enable_payload['ssoGroupsEnabled'] = True - if is_openid: - enable_payload['ssoProvider'] = "OIDC" - - response = rh.cluster_post(cluster, ENDPOINT, json=enable_payload) - return response.status_code - -def get_sso_status (cluster): - response = rh.cluster_get(cluster, ENDPOINT) - return response.json() + """Turns on SSO that has already been configured""" + enable_payload = { + "ssoProvider": "SAML", + "loginPage": "STANDARD", + "ssoEnabled": True, + "ssoGroupsEnabled": False, + "ssoLoginDisabled": False + } + + if disable_local: + enable_payload['loginPage'] = "SSO" + if groups_enabled: + enable_payload['ssoGroupsEnabled'] = True + if is_openid: + enable_payload['ssoProvider'] = "OIDC" + + response = rh.make_api_call(cluster=cluster, + endpoint=ENDPOINT, + method=rh.HTTP.POST, + json=enable_payload) + return response.status_code + + +def get_sso_status(cluster): + response = rh.make_api_call(cluster=cluster, + endpoint=ENDPOINT) + return response.json() diff --git a/dynatrace/cluster/user_groups.py b/dynatrace/cluster/user_groups.py index f6d4ec2..c073bac 100644 --- a/dynatrace/cluster/user_groups.py +++ b/dynatrace/cluster/user_groups.py @@ -11,91 +11,102 @@ "view_senstive": "VIEW_SENSITIVE_REQUEST_DATA" } + def generate_group_name(template, user_type, tenant, app_name): - template = template.replace("{USER_TYPE}", user_type) - template = template.replace("{TENANT}", tenant) - template = template.replace("{APP_NAME}", app_name) - template = template.lower() - return template + template = template.replace("{USER_TYPE}", user_type) + template = template.replace("{TENANT}", tenant) + template = template.replace("{APP_NAME}", app_name) + template = template.lower() + return template + def create_app_groups(cluster, app_name): - """Create Dynatrace User Groups for Applications""" - role_types = user_variables.USER_GROUPS['role_types'] - role_tenants = user_variables.USER_GROUPS['role_tenants'] - - all_new_groups = {} - for current_tenant in role_tenants: - all_new_groups[current_tenant] = {} - for current_type_key, current_type_value in role_types.items(): - group_id = generate_group_name(user_variables.USER_GROUP_TEMPLATE, 
current_type_value, current_tenant, app_name) - current_group = { - "isClusterAdminGroup": False, - "name":group_id, - "ldapGroupNames": [ - group_id, - ], - "accessRight": {} - } - - response = rh.cluster_post( - cluster, - "groups", - json=current_group - ) - all_new_groups[current_tenant][current_type_key] = ((response.json())['id']) - return all_new_groups + """Create Dynatrace User Groups for Applications""" + role_types = user_variables.USER_GROUPS['role_types'] + role_tenants = user_variables.USER_GROUPS['role_tenants'] + + all_new_groups = {} + for current_tenant in role_tenants: + all_new_groups[current_tenant] = {} + for current_type_key, current_type_value in role_types.items(): + group_id = generate_group_name( + user_variables.USER_GROUP_TEMPLATE, current_type_value, current_tenant, app_name) + current_group = { + "isClusterAdminGroup": False, + "name": group_id, + "ldapGroupNames": [ + group_id, + ], + "accessRight": {} + } + + response = rh.make_api_call( + cluster=cluster, + endpoint=rh.ClusterAPIs.GROUPS, + method=rh.HTTP.POST, + json=current_group + ) + all_new_groups[current_tenant][current_type_key] = ( + (response.json())['id']) + return all_new_groups + def create_app_groups_setwide(app_name): - """Create Dynatrace User Groups for Applications""" - for cluster in user_variables.FULL_SET.values(): - if cluster['is_managed']: - create_app_groups(cluster, app_name) - -def delete_app_groups (cluster, app_name): - role_types = user_variables.USER_GROUPS['role_types'] - role_tenants = user_variables.USER_GROUPS['role_tenants'] - - for current_tenant in role_tenants: - for current_type_value in role_types: - group_id = generate_group_name(user_variables.USER_GROUP_TEMPLATE, current_type_value, current_tenant, app_name) - group_id = ''.join(e for e in group_id if e.isalnum()) - rh.cluster_delete( - cluster, - "groups/" + group_id - ) + """Create Dynatrace User Groups for Applications""" + for cluster in user_variables.FULL_SET.values(): + if cluster['is_managed']: + create_app_groups(cluster, app_name) + + +def delete_app_groups(cluster, app_name): + role_types = user_variables.USER_GROUPS['role_types'] + role_tenants = user_variables.USER_GROUPS['role_tenants'] + + for current_tenant in role_tenants: + for current_type_value in role_types: + group_id = generate_group_name( + user_variables.USER_GROUP_TEMPLATE, current_type_value, current_tenant, app_name) + group_id = ''.join(e for e in group_id if e.isalnum()) + rh.make_api_call( + cluster=cluster, + method=rh.HTTP.DELETE, + endpoint=f"{rh.ClusterAPIs.GROUPS}/{group_id}" + ) + def delete_app_groups_setwide(app_name): - """Create Dynatrace User Groups for Applications""" - for cluster in user_variables.FULL_SET.values(): - if cluster['is_managed']: - delete_app_groups(cluster, app_name) + """Create Dynatrace User Groups for Applications""" + for cluster in user_variables.FULL_SET.values(): + if cluster['is_managed']: + delete_app_groups(cluster, app_name) + def create_app_clusterwide(cluster, app_name, zones=None): - """Create App User Groups and Management Zones""" - # Create Standard App MZs - mz_list = {} - for tenant_key in cluster['tenant'].keys(): - mzh.add_management_zone( - cluster, - tenant_key, - str.upper(app_name) - ) - if tenant_key in zones: - mz_list[tenant_key] = [] - for zone in zones[tenant_key]: - mz_id = mzh.add_management_zone( + """Create App User Groups and Management Zones""" + # Create Standard App MZs + mz_list = {} + for tenant_key in cluster['tenant'].keys(): + mzh.add_management_zone( cluster, 
tenant_key, - str.upper(app_name), - zone + str.upper(app_name) ) - if mz_id is not None: - mz_list[tenant_key].append(mz_id) + if tenant_key in zones: + mz_list[tenant_key] = [] + for zone in zones[tenant_key]: + mz_id = mzh.add_management_zone( + cluster, + tenant_key, + str.upper(app_name), + zone + ) + if mz_id is not None: + mz_list[tenant_key].append(mz_id) - # Create User Groups - user_groups = create_app_groups(cluster, app_name) - print(user_groups) + # Create User Groups + user_groups = create_app_groups(cluster, app_name) + print(user_groups) - # for tenant in user_variables.USER_GROUPS['role_tenants']: - # if "access_env" in user_groups [tenant]: - # add_mz_to_user \ No newline at end of file + # for tenant in user_variables.USER_GROUPS['role_tenants']: + # if "access_env" in user_groups [tenant]: + # add_mz_to_user diff --git a/dynatrace/cluster/users.py b/dynatrace/cluster/users.py index 65ceda2..6922136 100644 --- a/dynatrace/cluster/users.py +++ b/dynatrace/cluster/users.py @@ -3,49 +3,70 @@ # TODO add check for is_managed + def check_is_managed(cluster, ignore_saas): - """Checks if the cluster is Managed""" - if not cluster['is_managed'] and not ignore_saas: - raise Exception ('Cannot run operation on SaaS instances!') - return cluster['is_managed'] + """Checks if the cluster is Managed""" + if not cluster['is_managed'] and not ignore_saas: + raise Exception('Cannot run operation on SaaS instances!') + return cluster['is_managed'] + def get_users(cluster, ignore_saas=True): - """Get the list of Users on the Cluster""" - check_is_managed(cluster, ignore_saas) - response = rh.cluster_get(cluster, "users") - return response.json() + """Get the list of Users on the Cluster""" + check_is_managed(cluster, ignore_saas) + response = rh.make_api_call(cluster=cluster, + endpoint=rh.ClusterAPIs.USERS) + return response.json() + def add_user(cluster, user_json, ignore_saas=True): - """Add User to Cluster""" - check_is_managed(cluster, ignore_saas) - rh.cluster_post(cluster, "/users", json=user_json) - return 'OK' + """Add User to Cluster""" + check_is_managed(cluster, ignore_saas) + rh.make_api_call(cluster=cluster, + endpoint=rh.ClusterAPIs.USERS, + method=rh.HTTP.POST, + json=user_json) + return 'OK' + def update_user(cluster, user_json, ignore_saas=True): - """Update User to Cluster""" - check_is_managed(cluster, ignore_saas) - rh.cluster_put(cluster, "/users", json=user_json) - return 'OK' + """Update User to Cluster""" + check_is_managed(cluster, ignore_saas) + rh.make_api_call(cluster=cluster, + endpoint=rh.ClusterAPIs.USERS, + method=rh.HTTP.PUT, + json=user_json) + return 'OK' + def get_user(cluster, user_id, ignore_saas=True): - """Get Details for a Single User""" - check_is_managed(cluster, ignore_saas) - response = rh.cluster_get(cluster, "users/" + user_id) - return response.json() + """Get Details for a Single User""" + check_is_managed(cluster, ignore_saas) + response = rh.make_api_call(cluster=cluster, + endpoint=f"{rh.ClusterAPIs.USERS}/{user_id}") + return response.json() + def delete_user(cluster, user_id, ignore_saas=True): - """Delete a Single User""" - check_is_managed(cluster, ignore_saas) - response = rh.cluster_delete(cluster, "users/" + user_id) - return response.json() + """Delete a Single User""" + check_is_managed(cluster, ignore_saas) + response = rh.cluster_delete(cluster=cluster, + method=rh.HTTP.DELETE, + endpoint=f"{rh.ClusterAPIs.USERS}/{user_id}") + return response.json() + def get_user_count(cluster, ignore_saas=True): - """Return the number of, 
users in a cluster""" - check_is_managed(cluster, ignore_saas) - return len(get_users(cluster)) + """Return the number of, users in a cluster""" + check_is_managed(cluster, ignore_saas) + return len(get_users(cluster)) + def add_user_bulk(cluster, user_json, ignore_saas=True): - """Add Multiple Users""" - check_is_managed(cluster, ignore_saas) - rh.cluster_put(cluster, "/users/bulk", json=user_json) - return 'OK' + """Add Multiple Users""" + check_is_managed(cluster, ignore_saas) + rh.make_api_call(cluster=cluster, + method=rh.HTTP.POST, + endpoint=f"{rh.ClusterAPIs.USERS}/bulk", + json=user_json) + return 'OK' diff --git a/dynatrace/requests/request_handler.py b/dynatrace/requests/request_handler.py index d411f63..7cf9a21 100644 --- a/dynatrace/requests/request_handler.py +++ b/dynatrace/requests/request_handler.py @@ -1,300 +1,156 @@ """Make API Request to available Dynatrace API""" -import warnings -import contextlib import requests -from urllib3.exceptions import InsecureRequestWarning +import time +from enum import Enum, auto + +requests.packages.urllib3.disable_warnings() HTTPS_STR = "https://" -CLUSTER_V1_PATH = "/api/v1.0/onpremise/" -ENV_API_V1 = "/api/v1/" -CONFIG_API_V1 = "/api/config/v1/" -OLD_MERGE_ENVIRONMENT_SETTINGS = requests.Session.merge_environment_settings -@contextlib.contextmanager -def no_ssl_verification(): - """Silence Request Warning for Unchecked SSL""" - opened_adapters = set() +class ClusterAPIs(Enum): + """ + Enum representing Dynatrace Cluster REST API endpoints.\n + Use these values when adding the 'endpoint' argument. + """ + BASE = "/api/v1.0/onpremise" + CLUSTER = f"{BASE}/cluster" + CONFIG = f"{CLUSTER}/configuration" + CONFIG_STATUS = f"{CONFIG}/status" + SSL = f"{BASE}/sslCertificate" + SSL_STORE = f"{SSL}/store" + SSO = "" # Need to confirm endpoint + GROUPS = f"{BASE}/groups" + USERS = f"{BASE}/users" + + def __str__(self): + return self.value + + +class TenantAPIs(Enum): + """ + Enum representing Dynatrace Tenant REST API endpoints.\n + Use these values when adding the 'endpoint' argument. + """ + PROBLEM_DETAILS = "/api/v1/problem/details" + PROBLEM_FEED = "/api/v1/problem/feed" + PROBLEM_STATUS = "/api/v1/problem/status" + DEPLOY_ONEAGENT = "/api/v1/deployment/installer/agent" + DEPLOY_ONEAGENT_CONNECTION_INFO = "/api/v1/deployment/installer/agent/connectioninfo" + DEPLOY_ONEAGENT_CONNECTION_ENDPOINTS = "/api/v1/deployment/installer/agent/connectioninfo/endpoints" + DEPLOY_ACTIVEGATE = "/api/v1/deployment/installer/gateway" + DEPLOY_BOSH = "/api/v1/deployment/boshrelease" + EVENTS = "/api/v1/events" + USER_SESSIONS = "/api/v1/userSessionQueryLanguage" + TOKENS = "/api/v1/tokens" + SYNTHETIC_MONITORS = "/api/v1/synthetic/monitors" + SYNTHETIC_LOCATIONS = "/api/v1/synthetic/locations" + SYNTHETIC_NODES = "/api/v1/synthetic/nodes" + ENTITIES = "/api/v2/entities" + METRICS = "/api/v2/metrics" + TAGS = "/api/v2/tags" + NETWORK_ZONES = "/api/v2/networkZones" + MANAGEMENT_ZONES = "/api/config/v1/managementZones" + V1_TOPOLOGY = "/api/v1/entity" + MAINTENANCE_WINDOWS = "/api/config/v1/maintenanceWindows" + ONEAGENTS = "/api/v1/oneagents" + EXTENSIONS = "/api/config/v1/extensions" + REQUEST_ATTRIBUTES = "/api/config/v1/service/requestAttributes/" + REQUEST_NAMING = "/api/config/v1/service/requestNaming" + + def __str__(self): + return self.value + + +class HTTP(Enum): + ''' + Enum representing HTTP request methods.\n + Use these values when adding the 'method' argument. 
+ ''' + GET = auto() + PUT = auto() + POST = auto() + DELETE = auto() + + def __str__(self): + return self.name + + def __repr__(self): + return self.name + + +def make_api_call(cluster, endpoint, tenant=None, params=None, json=None, method=HTTP.GET): + # Set the right URL for the operation + url = f"{generate_tenant_url(cluster, tenant)}{endpoint}" if tenant else cluster['url'] + + if not params: + params = {} + + # Get correct token for the operation + if 'onpremise' in str(endpoint) or 'cluster' in str(endpoint): + params['Api-Token'] = cluster['cluster_token'] + else: + params['Api-Token'] = cluster['api_token'][tenant] + + # Loop to retry in case of rate limits + while True: + if method == HTTP.GET: + response = requests.get(url=url, + params=params, + verify=cluster.get('verify_ssl')) + elif method == HTTP.PUT: + response = requests.put(url=url, + params=params, + verify=cluster.get('verify_ssl'), + json=json) + elif method == HTTP.POST: + response = requests.post(url=url, + params=params, + verify=cluster.get('verify_ssl'), + json=json) + elif method == HTTP.DELETE: + response = requests.delete(url=url, + params=params, + verify=cluster.get('verify_ssl')) + if check_response(response): + break - def merge_environment_settings(self, url, proxies, stream, verify, cert): - # Verification happens only once per connection so we need to close - # all the opened adapters once we're done. Otherwise, the effects of - # verify=False persist beyond the end of this context manager. - opened_adapters.add(self.get_adapter(url)) + return response - settings = OLD_MERGE_ENVIRONMENT_SETTINGS(self, url, proxies, stream, verify, cert) - return settings +def check_response(response): + """Checks if the Reponse has a Successful Status Code""" + headers = response.headers - requests.Session.merge_environment_settings = merge_environment_settings + if response.status_code == 429: + print("Endpoint request limit of " + f"{headers['x-ratelimit-limit']} was reached!") + # Wait until the limit resets and try again + time_to_wait = int(headers['x-ratelimit-reset'])/1000000 - time.time() - try: - with warnings.catch_warnings(): - warnings.simplefilter('ignore', InsecureRequestWarning) - yield - finally: - requests.Session.merge_environment_settings = OLD_MERGE_ENVIRONMENT_SETTINGS + # Check that there's actually time to wait + if time_to_wait > 0: + print(f"Waiting {time_to_wait} sec until the limit resets.") + time.sleep(float(time_to_wait)) + return False + elif not 200 <= response.status_code <= 299: + raise Exception(f"Response Error\n{response.url}\n" + f"{response.status_code}\n{response.text}") - for adapter in opened_adapters: - try: - adapter.close() - except Exception: - pass + return True -def check_response(response): - """Checks if the Reponse has a Successful Status Code""" - if not 200 <= response.status_code <= 299: - raise Exception( - "Response Error\n" + response.url + "\n" + str(response.status_code) + "\n" + response.text - ) def check_managed(managed_bool): - """Checks if the Cluster Operation is valid (Managed) for the current cluster""" - if not managed_bool: - raise Exception("Cluster Operations not supported for SaaS!") + """Checks if the Cluster Operation is valid (Managed) for the current cluster""" + if not managed_bool: + raise Exception("Cluster Operations not supported for SaaS!") -def sanitize_endpoint (endpoint): - if endpoint[0] == '/': - endpoint = endpoint [1:] - return endpoint def generate_tenant_url(cluster, tenant): - """Generate URL based on SaaS or Managed""" - url = 
HTTPS_STR - if cluster["is_managed"]: - url = url + cluster['url'] + "/e/" + cluster['tenant'][tenant] - else: - url = url + cluster['tenant'][tenant] + "." + cluster['url'] - return url - -def cluster_get(cluster, endpoint, params=None): - """Get Request to Cluster API""" - check_managed(cluster["is_managed"]) - - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['cluster_token'] - - response = requests.get( - HTTPS_STR + cluster['url'] + CLUSTER_V1_PATH + endpoint, - params=params, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]) - ) - check_response(response) - return response - - -def cluster_post(cluster, endpoint, params=None, json=None): - """Post Request to Cluster API""" - check_managed(cluster["is_managed"]) - - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['cluster_token'] - - response = requests.post( - HTTPS_STR + cluster['url'] + CLUSTER_V1_PATH + endpoint, - params=params, - json=json, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]) - ) - check_response(response) - return response - -def cluster_put(cluster, endpoint, params=None, json=None): - """Post Request to Cluster API""" - check_managed(cluster["is_managed"]) - - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['cluster_token'] - - response = requests.put( - HTTPS_STR + cluster['url'] + CLUSTER_V1_PATH + endpoint, - params=params, - json=json, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]) - ) - check_response(response) - return response - -def cluster_delete(cluster, endpoint, params=None, json=None): - """Delete Request to Cluster API""" - check_managed(cluster["is_managed"]) - - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['cluster_token'] - response = requests.delete( - HTTPS_STR + cluster['url'] + CLUSTER_V1_PATH + endpoint, - params=params, - json=json, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]) - ) - check_response(response) - return response - -def env_get(cluster, tenant, endpoint, params=None): - """Get Request to Tenant Environment API""" - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['api_token'][tenant] - response = requests.get( - generate_tenant_url(cluster, tenant) + ENV_API_V1 + endpoint, - params=params, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]) - ) - check_response(response) - return response - -def env_post(cluster, tenant, endpoint, params=None, json=None): - """Post Request to Tenant Environment API""" - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['api_token'][tenant] - - response = requests.post( - generate_tenant_url(cluster, tenant) + ENV_API_V1 + endpoint, - params=params, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]), - json=json - ) - check_response(response) - return response - -def env_put(cluster, tenant, endpoint, params=None, json=None): - """Post Request to Tenant Environment API""" - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): 
- params['Api-Token'] = cluster['api_token'][tenant] - - response = requests.put( - generate_tenant_url(cluster, tenant) + ENV_API_V1 + endpoint, - params=params, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]), - json=json - ) - check_response(response) - return response - -def env_delete(cluster, tenant, endpoint, params=None): - """Get Request to Tenant Environment API""" - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['api_token'][tenant] - response = requests.delete( - generate_tenant_url(cluster, tenant) + ENV_API_V1 + endpoint, - params=params, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]) - ) - check_response(response) - return response - - -def config_get(cluster, tenant, endpoint, params=None, json=None): - """Get Request to Tenant Configuration API""" - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['api_token'][tenant] - - response = requests.get( - generate_tenant_url(cluster, tenant) + CONFIG_API_V1 + endpoint, - params=params, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]), - json=json - ) - check_response(response) - return response - -def config_post(cluster, tenant, endpoint, params=None, json=None): - """Post Request to Tenant Configuration API""" - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['api_token'][tenant] - - response = requests.post( - generate_tenant_url(cluster, tenant) + CONFIG_API_V1 + endpoint, - params=params, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]), - json=json - ) - check_response(response) - return response - -def config_put(cluster, tenant, endpoint, params=None, json=None): - """Put Request to Tenant Configuration API""" - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['api_token'][tenant] - - response = requests.put( - generate_tenant_url(cluster, tenant) + CONFIG_API_V1 + endpoint, - params=params, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]), - json=json - ) - check_response(response) - return response - -def config_delete(cluster, tenant, endpoint, params=None, json=None): - """Delete Request to Tenant Configuration API""" - if not params: - params = {} - - endpoint = sanitize_endpoint(endpoint) - - with no_ssl_verification(): - params['Api-Token'] = cluster['api_token'][tenant] - - response = requests.delete( - generate_tenant_url(cluster, tenant) + CONFIG_API_V1 + endpoint, - params=params, - verify=(True if "verify_ssl" not in cluster else cluster ["verify_ssl"]), - json=json - ) - check_response(response) - return response + """Generate URL based on SaaS or Managed""" + url = HTTPS_STR + if cluster["is_managed"]: + url += cluster['url'] + "/e/" + cluster['tenant'][tenant] + else: + url += cluster['tenant'][tenant] + "." 
+ cluster['url'] + return url diff --git a/dynatrace/tenant/extensions.py b/dynatrace/tenant/extensions.py new file mode 100644 index 0000000..a5967e8 --- /dev/null +++ b/dynatrace/tenant/extensions.py @@ -0,0 +1,34 @@ +from dynatrace.requests import request_handler as rh + +ENDPOINT = rh.TenantAPIs.EXTENSIONS + + +def get_all_extensions(cluster, tenant, params=None): + """ Gets the list of all extensions available""" + # TODO: Add pagination + + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=ENDPOINT, + params=params) + return response.json().get('extensions') + + +def get_extension_details(cluster, tenant, extension_id): + """ Get the details of a specific extension""" + + response = rh.make_api_call(cluster=cluster, + endpoint=f"{ENDPOINT}/{extension_id}", + tenant=tenant) + return response.json() + + +def get_extension_states(cluster, tenant, extension_id, params=None): + """ Gets all the deployment states of a specific extension""" + # TODO: Add pagination + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=f"{ENDPOINT}/{extension_id}/states", + params=params) + + return response.json().get('states') diff --git a/dynatrace/tenant/host_groups.py b/dynatrace/tenant/host_groups.py index b927775..8416817 100644 --- a/dynatrace/tenant/host_groups.py +++ b/dynatrace/tenant/host_groups.py @@ -1,7 +1,5 @@ """Host Group Information for Tenant""" -import user_variables from dynatrace.topology import hosts as topology_hosts -from dynatrace.requests import request_handler as rh # TODO redo export function (break out to export function?) # def export_host_groups_setwide(full_set): @@ -12,27 +10,35 @@ # outFile.write(groupName+"\n") # print(envName + " writing to 'HostGroups - " + envName + ".txt'") + def get_host_groups_tenantwide(cluster, tenant): - params = { - 'relativeTime':'day', - 'includeDetails':'true' - } - response = topology_hosts.get_hosts_tenantwide(cluster, tenant, params=params) - host_groups = {} - for host in response: - host_groups[host['hostGroup']['meId']] = host['hostGroup']['name'] - return host_groups - -def get_host_groups_clusterwide (cluster): - # TODO add split_by_tenant optional variable - host_groups_custerwide = {} - for tenant in cluster['tenant']: - host_groups_custerwide.update(get_host_groups_tenantwide(cluster, tenant)) - return host_groups_custerwide - -def get_host_groups_setwide (full_set): - # TODO add split_by_tenant optional variable - host_groups_setwide = {} - for cluster in full_set.values(): - host_groups_setwide.update(get_host_groups_clusterwide(cluster)) - return host_groups_setwide \ No newline at end of file + params = { + 'relativeTime': 'day', + 'includeDetails': 'true' + } + response = topology_hosts.get_hosts_tenantwide(cluster, + tenant, + params=params) + host_groups = {} + for host in response: + if host.get('hostGroup'): + host_groups[host['hostGroup']['meId']] = host['hostGroup']['name'] + return host_groups + + +def get_host_groups_clusterwide(cluster): + # TODO add split_by_tenant optional variable + host_groups_custerwide = {} + for tenant in cluster['tenant']: + host_groups_custerwide.update( + get_host_groups_tenantwide(cluster, tenant) + ) + return host_groups_custerwide + + +def get_host_groups_setwide(full_set): + # TODO add split_by_tenant optional variable + host_groups_setwide = {} + for cluster in full_set.values(): + host_groups_setwide.update(get_host_groups_clusterwide(cluster)) + return host_groups_setwide diff --git a/dynatrace/tenant/maintenance.py 
b/dynatrace/tenant/maintenance.py index a49dc27..0e90691 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -5,158 +5,185 @@ import user_variables as uv -MZ_ENDPOINT = "/maintenanceWindows/" +MZ_ENDPOINT = rh.TenantAPIs.MAINTENANCE_WINDOWS + class InvalidDateFormatException(ValueError): - def __init__(self, required_format): - self.required_format = required_format - print ("Incorrect Date for following entry: %s", required_format) + def __init__(self, required_format): + self.required_format = required_format + print("Incorrect Date for following entry: %s", required_format) -class InvalidScopeException(ValueError): - def __init__(self, required_format): - self.required_format = required_format - print ("Invalid scope used. Tag required for management zone, matching rule: %s", required_format) -def validate_datetime(datetime_text, required_format): - try: - datetime.datetime.strptime(datetime_text, required_format) - except ValueError as e: - raise InvalidDateFormatException(required_format) +class InvalidScopeException(ValueError): + def __init__(self, required_format): + self.required_format = required_format + print("Invalid scope used. Tag required for management zone, matching rule: %s", required_format) -def generate_scope(entities=None, filter_type=None, management_zone_id=None, tags=None, matches_any_tag=False): - if entities is None: - entities = [] - matches = [] - matches_payload = {} - if isinstance (filter_type, str): - matches_payload['type'] = filter_type - if management_zone_id: - matches_payload['managementZoneId'] = management_zone_id - if isinstance(tags, list): - matches_payload['tags'] = tags - - matches.append(matches_payload) - - scope = { - 'entities': entities, - 'matches': matches - } - return scope -def generate_window_json(name, description, suppression, schedule, scope=None, is_planned=False,): - """Generate JSON information needed for creating Maintenance Window""" - window_json = { - "name": name, - "description": description, - "suppression": suppression, - "schedule": schedule - } - window_json ['type'] = "PLANNED" if is_planned else "UNPLANNED" - if scope is not None: - window_json['scope'] = scope - return window_json - -def generate_schedule(recurrence_type, start_time, duration, range_start, range_end, day=None, zoneId=None,): - """Create schedule structure for maintenance window""" - # This structure requires a lot of input validation - types_available = [ "DAILY", "MONTHLY", "ONCE", "WEEKLY" ] - days_of_week = [ "FRIDAY", "MONDAY", "SATURDAY", "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY" ] - - recurrence_type = str(recurrence_type).upper() - - # Check Recurrence - if recurrence_type not in types_available: - raise Exception ("Invalid Recurrence Type! Allowed values are: ONCE, DAILY, WEEKLY, MONTHLY") - - # Check ranges - validate_datetime(range_start, "%Y-%m-%d %H:%M") - validate_datetime(range_end, "%Y-%m-%d %H:%M") - - schedule = { - "recurrenceType": recurrence_type, - "start": range_start, - "end": range_end - } - - if zoneId is None: - schedule['zoneId'] = uv.DEFAULT_TIMEZONE - - if recurrence_type != "ONCE": - # Check Start Time - validate_datetime(start_time, "%H:%M") - - # Check Duration +def validate_datetime(datetime_text, required_format): try: - int(duration) + datetime.datetime.strptime(datetime_text, required_format) except ValueError: - ("Duration time must be an integer! 
Duration is length of Maintainence Window in minutes") + raise InvalidDateFormatException(required_format) - schedule['recurrence'] = { - "startTime": start_time, - "durationMinutes": duration + +def generate_scope(entities=None, filter_type=None, management_zone_id=None, tags=None, matches_any_tag=False): + if entities is None: + entities = [] + matches = [] + matches_payload = {} + if isinstance(filter_type, str): + matches_payload['type'] = filter_type + if management_zone_id: + matches_payload['managementZoneId'] = management_zone_id + if isinstance(tags, list): + matches_payload['tags'] = tags + + matches.append(matches_payload) + + scope = { + 'entities': entities, + 'matches': matches } + return scope - # Check Weekly Day - if recurrence_type == "WEEKLY": - day = str(day).upper() - if day in days_of_week: - schedule['recurrence']['dayOfWeek'] = day - else: - raise Exception ("Invalid Weekly Day! Allowed values are " \ - + "SUNDAY, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY") - # Check Monthly Day - if recurrence_type == "MONTHLY": - if (1 <= int(day) <= 31): - schedule['recurrence']['dayOfMonth'] = day - else: - raise Exception ("Invalid Monthly Day! Allowed values are 1-31") +def generate_window_json(name, description, suppression, schedule, scope=None, is_planned=False,): + """Generate JSON information needed for creating Maintenance Window""" + window_json = { + "name": name, + "description": description, + "suppression": suppression, + "schedule": schedule + } + window_json['type'] = "PLANNED" if is_planned else "UNPLANNED" + if scope is not None: + window_json['scope'] = scope + return window_json + - return schedule +def generate_schedule(recurrence_type, start_time, duration, range_start, range_end, day=None, zoneId=None,): + """Create schedule structure for maintenance window""" + # This structure requires a lot of input validation + types_available = ["DAILY", "MONTHLY", "ONCE", "WEEKLY"] + days_of_week = ["FRIDAY", "MONDAY", "SATURDAY", + "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"] + + recurrence_type = str(recurrence_type).upper() + + # Check Recurrence + if recurrence_type not in types_available: + raise Exception( + "Invalid Recurrence Type! Allowed values are: ONCE, DAILY, WEEKLY, MONTHLY") + + # Check ranges + validate_datetime(range_start, "%Y-%m-%d %H:%M") + validate_datetime(range_end, "%Y-%m-%d %H:%M") + + schedule = { + "recurrenceType": recurrence_type, + "start": range_start, + "end": range_end + } -def create_window (cluster, tenant, json): - """Create Maintenance Window""" - response = rh.config_post(cluster, tenant, MZ_ENDPOINT, json=json) - return response.status_code + if zoneId is None: + schedule['zoneId'] = uv.DEFAULT_TIMEZONE + + if recurrence_type != "ONCE": + # Check Start Time + validate_datetime(start_time, "%H:%M") + + # Check Duration + try: + int(duration) + except ValueError: + ("Duration time must be an integer! Duration is length of Maintainence Window in minutes") + + schedule['recurrence'] = { + "startTime": start_time, + "durationMinutes": duration + } + + # Check Weekly Day + if recurrence_type == "WEEKLY": + day = str(day).upper() + if day in days_of_week: + schedule['recurrence']['dayOfWeek'] = day + else: + raise Exception("Invalid Weekly Day! Allowed values are " + + "SUNDAY, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY") + + # Check Monthly Day + if recurrence_type == "MONTHLY": + if (1 <= int(day) <= 31): + schedule['recurrence']['dayOfMonth'] = day + else: + raise Exception("Invalid Monthly Day! 
Allowed values are 1-31") + + return schedule + + +def create_window(cluster, tenant, json): + """Create Maintenance Window""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + method=rh.HTTP.POST, + endpoint=MZ_ENDPOINT, + json=json) + return response.status_code + + +def update_window(cluster, tenant, window_id, json): + """Update Maintenance Window""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + method=rh.HTTP.PUT, + endpoint=f"{MZ_ENDPOINT}/{window_id}", + json=json) + return response.status_code + + +def delete_window(cluster, tenant, window_id): + """Delete Maintenance Window""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + method=rh.HTTP.DELETE, + endpoint=f"{MZ_ENDPOINT}/{window_id}") + return response.status_code + + +def get_windows(cluster, tenant): + """Return List of Maintenance Windows in Effect""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=MZ_ENDPOINT) + return response.json() + + +def get_window(cluster, tenant, window_id): + """Return Maintenance Window Details""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=f"{MZ_ENDPOINT}/{window_id}") + return response.json() -def update_window (cluster, tenant, window_id, json): - """Update Maintenance Window""" - response = rh.config_put(cluster, tenant, MZ_ENDPOINT + window_id, json=json) - return response.status_code -def delete_window (cluster, tenant, window_id): - """Delete Maintenance Window""" - response = rh.config_delete(cluster, tenant, MZ_ENDPOINT + window_id) - return response.status_code - -def get_windows (cluster, tenant): - """Return List of Maintenance Windows in Effect""" - response = rh.config_get(cluster, tenant, MZ_ENDPOINT) - return response.json() +def parse_tag(tag_string): + # Need a way to process literal colon inside a key + "Parsing Tag to to Context, Key and Value" + m = re.match( + r"(?:\[(\w+)\])?([\w\-\/`\+\.\!\@\#\$\%\^\&\*\(\)\?\[\]\{\}\,\<\>\ \:\;]+)(?:\:(\w*))?", + tag_string + ) + tag_dictionary = {} + if m.group(1): + tag_dictionary['context'] = m.group(1) + else: + tag_dictionary['context'] = "CONTEXTLESS" + tag_dictionary['key'] = m.group(2) # Key is always required -def get_window (cluster, tenant, window_id): - """Return Maintenance Window Details""" - response = rh.config_get(cluster, tenant, MZ_ENDPOINT + window_id) - return response.json() + if m.group(3): + tag_dictionary['value'] = m.group(3) -def parse_tag(tag_string): - # Need a way to process literal colon inside a key - "Parsing Tag to to Context, Key and Value" - m = re.match( - r"(?:\[(\w+)\])?([\w\-\/`\+\.\!\@\#\$\%\^\&\*\(\)\?\[\]\{\}\,\<\>\ \:\;]+)(?:\:(\w*))?", - tag_string - ) - tag_dictionary = {} - if m.group(1): - tag_dictionary['context'] = m.group(1) - else: - tag_dictionary['context'] = "CONTEXTLESS" - - tag_dictionary['key'] = m.group(2) # Key is always required - - if m.group(3): - tag_dictionary['value'] = m.group(3) - - return tag_dictionary - \ No newline at end of file + return tag_dictionary diff --git a/dynatrace/tenant/management_zones.py b/dynatrace/tenant/management_zones.py index e0750aa..192c587 100644 --- a/dynatrace/tenant/management_zones.py +++ b/dynatrace/tenant/management_zones.py @@ -5,107 +5,119 @@ import json from dynatrace.requests import request_handler as rh -def generate_mz_payload(application, env_zone=None): - """Create Payload for Management Zone based on Application and Environment""" - with open('../templates/mz_template.json', 'r') as mz_template: - mz_payload = 
json.load(mz_template) - - mz_payload['name'] = str(application) - # The Template will have - # Service Rules(0), Process Group Rules(1), Application Rules(2), - # Browser Monitors(3), HTTP Monitor(4), External Monitors(5), Manually Tagged Services (6), - # Manually Tagged Process Groups (7), Mobile Application (8), Custom Device Groups (9), - # Service and Process Groups are different because they allow Key/Value Pairs - - # TODO Consolidate by checking if Key/Value Pair exists - mz_payload['rules'][0]['conditions'][0]['comparisonInfo']['value']['value'] = str(application) - mz_payload['rules'][1]['conditions'][0]['comparisonInfo']['value']['value'] = str(application) +ENDPOINT = rh.TenantAPIs.MANAGEMENT_ZONES - for rule_num in range(2, 10): - mz_payload['rules'][rule_num]['conditions'][0]['comparisonInfo']['value']['key'] = "APP: " + str(application) - - if env_zone: - # If environment exists, rename MZ and add environment conditions - mz_payload['name'] = str(application) + " - " + str(env_zone) +def generate_mz_payload(application, env_zone=None): + """Create Payload for Management Zone based on Application and Environment""" + with open('../templates/mz_template.json', 'r') as mz_template: + mz_payload = json.load(mz_template) + + mz_payload['name'] = str(application) + # The Template will have + # Service Rules(0), Process Group Rules(1), Application Rules(2), + # Browser Monitors(3), HTTP Monitor(4), External Monitors(5), Manually Tagged Services (6), + # Manually Tagged Process Groups (7), Mobile Application (8), Custom Device Groups (9), # Service and Process Groups are different because they allow Key/Value Pairs - condition_payload = copy.deepcopy(mz_payload['rules'][0]['conditions'][0]) - condition_payload['comparisonInfo']['value']['key'] = "ENV" - condition_payload['comparisonInfo']['value']['value'] = str(env_zone) - mz_payload['rules'][0]['conditions'].append(condition_payload) - - del condition_payload - condition_payload = copy.deepcopy(mz_payload['rules'][1]['conditions'][0]) - condition_payload['comparisonInfo']['value']['key'] = "ENV" - condition_payload['comparisonInfo']['value']['value'] = str(env_zone) - mz_payload['rules'][1]['conditions'].append(condition_payload) - # Application, Browser Monitors, HTTP Monitor, External Monitors (in that order) + + # TODO Consolidate by checking if Key/Value Pair exists + mz_payload['rules'][0]['conditions'][0]['comparisonInfo']['value']['value'] = str( + application) + mz_payload['rules'][1]['conditions'][0]['comparisonInfo']['value']['value'] = str( + application) for rule_num in range(2, 10): - del condition_payload - condition_payload = copy.deepcopy(mz_payload['rules'][rule_num]['conditions'][0]) - condition_payload['comparisonInfo']['value']['key'] = "ENV: " + str(env_zone) - mz_payload['rules'][rule_num]['conditions'].append(condition_payload) + mz_payload['rules'][rule_num]['conditions'][0]['comparisonInfo']['value']['key'] = "APP: " + \ + str(application) + + if env_zone: + # If environment exists, rename MZ and add environment conditions + mz_payload['name'] = str(application) + " - " + str(env_zone) + + # Service and Process Groups are different because they allow Key/Value Pairs + condition_payload = copy.deepcopy( + mz_payload['rules'][0]['conditions'][0]) + condition_payload['comparisonInfo']['value']['key'] = "ENV" + condition_payload['comparisonInfo']['value']['value'] = str(env_zone) + mz_payload['rules'][0]['conditions'].append(condition_payload) + + del condition_payload + condition_payload = copy.deepcopy( + 
mz_payload['rules'][1]['conditions'][0]) + condition_payload['comparisonInfo']['value']['key'] = "ENV" + condition_payload['comparisonInfo']['value']['value'] = str(env_zone) + mz_payload['rules'][1]['conditions'].append(condition_payload) + # Application, Browser Monitors, HTTP Monitor, External Monitors (in that order) + + for rule_num in range(2, 10): + del condition_payload + condition_payload = copy.deepcopy( + mz_payload['rules'][rule_num]['conditions'][0]) + condition_payload['comparisonInfo']['value']['key'] = "ENV: " + \ + str(env_zone) + mz_payload['rules'][rule_num]['conditions'].append( + condition_payload) + + return mz_payload - return mz_payload def add_management_zone(cluster, tenant, application, env_zone=None): - """Add Management Zone based on Application and Environment""" - mz_payload = generate_mz_payload(application, env_zone) - - response = rh.config_post( - cluster, - tenant, - '/managementZones', - json=mz_payload - ) - if "id" in response.json(): - return (response.json())['id'] - else: - return (response.text) + """Add Management Zone based on Application and Environment""" + mz_payload = generate_mz_payload(application, env_zone) + + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + method=rh.HTTP.POST, + endpoint=ENDPOINT, + json=mz_payload) + if "id" in response.json(): + return (response.json())['id'] + else: + return (response.text) + def change_management_zone(cluster, tenant, mz_id, application, env_zone=None): - """Add Management Zone based on Application and Environment""" - mz_payload = generate_mz_payload(application, env_zone) + """Add Management Zone based on Application and Environment""" + mz_payload = generate_mz_payload(application, env_zone) + + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + method=rh.HTTP.PUT, + endpoint=f"{ENDPOINT}/{mz_id}", + json=mz_payload) + print(response.status_code) - response = rh.config_put( - cluster, - tenant, - 'managementZones/' + str(mz_id), - json=mz_payload - ) - print(response.status_code) def delete_management_zone_by_id(cluster, tenant, mz_id): - """Delete Management Zone by Management Zone ID""" - response = rh.config_delete( - cluster, - tenant, - "managementZones/" + str(mz_id), - ) - print(response.status_code) + """Delete Management Zone by Management Zone ID""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + method=rh.HTTP.DELETE, + endpoint=f"{ENDPOINT}/{mz_id}") + print(response.status_code) + def delete_management_zone_by_name(cluster, tenant, mz_name): - """Delete Management Zone by Management Zone Name""" - #TODO This function - return "TODO " + cluster + tenant + mz_name + """Delete Management Zone by Management Zone Name""" + # TODO This function + return "TODO " + cluster + tenant + mz_name + def get_management_zone_list(cluster, tenant): - """Get all Management Zones in Environment""" - #TODO Cache Management Zone list for Env, and add a cleanup script to remove after run. - response = rh.config_get( - cluster, - tenant, - "managementZones", - ) - mz_list_raw = response.json() - return mz_list_raw['values'] + """Get all Management Zones in Environment""" + # TODO Cache Management Zone list for Env, and add a cleanup script to remove after run. 
+ response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=ENDPOINT) + mz_list_raw = response.json() + return mz_list_raw['values'] + def get_management_zone_id(cluster, tenant, mz_name): - """Get Management Zone ID of Management Zone Name""" - mz_list = get_management_zone_list(cluster, tenant) + """Get Management Zone ID of Management Zone Name""" + mz_list = get_management_zone_list(cluster, tenant) - for m_zone in mz_list: - if m_zone['name'] == mz_name: - return m_zone['id'] - return None + for m_zone in mz_list: + if m_zone['name'] == mz_name: + return m_zone['id'] + return None diff --git a/dynatrace/tenant/request_attributes.py b/dynatrace/tenant/request_attributes.py index 7d4d6bb..d1340ac 100644 --- a/dynatrace/tenant/request_attributes.py +++ b/dynatrace/tenant/request_attributes.py @@ -3,64 +3,73 @@ import json from dynatrace.requests import request_handler as rh -ENDPOINT = "/service/requestAttributes/" +ENDPOINT = rh.TenantAPIs.REQUEST_ATTRIBUTES + def pull_to_files(cluster, tenant, ignore_disabled=True): - """Pull files from an environment to local""" - # API Calls needed: Pull RA, take the ID and pull the details of each RA - all_ra_call = rh.config_get(cluster, tenant, ENDPOINT) - all_ra_json = all_ra_call.json() - all_ra_json = all_ra_json['values'] - #print (json.dumps(all_ra_json, indent=2)) - ra_file_list = [] - for request_attribute in all_ra_json: - single_ra_call = rh.config_get( - cluster, - tenant, - ENDPOINT + str(request_attribute['id']) - ) - if single_ra_call.status_code == 200: - single_ra_json = single_ra_call.json() - if single_ra_json['enabled'] and ignore_disabled: - single_ra_json.pop("metadata") - single_ra_json.pop("id") - ra_file_name = "jsons/request_attributes/" + str(single_ra_json['name']) + ".json" - with open(ra_file_name, 'w') as current_file: - json.dump(single_ra_json, current_file, indent=2) - ra_file_list.append(ra_file_name) - else: - print (single_ra_call.status_code) - return ra_file_list + """Pull files from an environment to local""" + # API Calls needed: Pull RA, take the ID and pull the details of each RA + all_ra_call = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=ENDPOINT) + all_ra_json = all_ra_call.json() + all_ra_json = all_ra_json['values'] + # print (json.dumps(all_ra_json, indent=2)) + ra_file_list = [] + for request_attribute in all_ra_json: + single_ra_call = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=f"{ENDPOINT}/{request_attribute['id']}") + if single_ra_call.status_code == 200: + single_ra_json = single_ra_call.json() + if single_ra_json['enabled'] and ignore_disabled: + single_ra_json.pop("metadata") + single_ra_json.pop("id") + ra_file_name = "jsons/request_attributes/" + \ + str(single_ra_json['name']) + ".json" + with open(ra_file_name, 'w') as current_file: + json.dump(single_ra_json, current_file, indent=2) + ra_file_list.append(ra_file_name) + else: + print(single_ra_call.status_code) + return ra_file_list + def push_from_files(file_list, cluster, tenant): - """Push Request Attributes in JSONs to a tenant""" - - # Checks for Existing RAs to update them put request rather than a post that would fail - existing_ra_get = rh.config_get(cluster, tenant, ENDPOINT) - existing_ra_json = existing_ra_get.json() - existing_ra_json = existing_ra_json['values'] - existing_ra_list = {} - for existing_ra in existing_ra_json: - existing_ra_list["jsons/request_attributes/" + str(existing_ra['name']) + ".json"] = existing_ra['id'] + """Push Request Attributes in JSONs to a 
tenant""" + + # Checks for Existing RAs to update them put request rather than a post that would fail + existing_ra_get = rh.make_api_call(cluster=cluster, tenant=tenant, endpoint=ENDPOINT) + existing_ra_json = existing_ra_get.json() + existing_ra_json = existing_ra_json['values'] + existing_ra_list = {} + for existing_ra in existing_ra_json: + existing_ra_list["jsons/request_attributes/" + + str(existing_ra['name']) + ".json"] = existing_ra['id'] - for file in file_list: - with open(file, 'r') as ra_file: - ra_json = json.load(ra_file) - if file in existing_ra_list: - single_ra_post = rh.config_put( - cluster, - tenant, - ENDPOINT + existing_ra_list[file], - json=ra_json - ) - else: - single_ra_post = rh.config_post( - cluster, - tenant, - ENDPOINT, - json=ra_json - ) - if single_ra_post.status_code >= 400: - print("Error with " + file + ". Status Code: " + str(single_ra_post.status_code)) - else: - print("Success " + file + " " + single_ra_post.text) + for file in file_list: + with open(file, 'r') as ra_file: + ra_json = json.load(ra_file) + if file in existing_ra_list: + single_ra_post = rh.make_api_call( + cluster=cluster, + tenant=tenant, + method=rh.HTTP.PUT, + endpoint=f"{ENDPOINT}/{existing_ra_list[file]}", + json=ra_json + ) + else: + single_ra_post = rh.make_api_call( + cluster=cluster, + tenant=tenant, + method=rh.HTTP.POST, + endpoint=ENDPOINT, + json=ra_json + ) + if single_ra_post.status_code >= 400: + # NOTE: what about the check response in req handler!? + # That will throw an exception first, which this should except + print("Error with " + file + ". Status Code: " + + str(single_ra_post.status_code)) + else: + print("Success " + file + " " + single_ra_post.text) diff --git a/dynatrace/tenant/request_naming.py b/dynatrace/tenant/request_naming.py index b62f67c..e284524 100644 --- a/dynatrace/tenant/request_naming.py +++ b/dynatrace/tenant/request_naming.py @@ -5,43 +5,48 @@ import json from dynatrace.requests import request_handler as rh +ENDPOINT = rh.TenantAPIs.REQUEST_NAMING + + def pull_to_files(cluster, tenant, ignore_disabled=True): - """Pull Service Naming Rules to Files""" - all_rules_call = rh.config_get(cluster, tenant, "/service/requestNaming") - all_rules_list = all_rules_call.json() - all_rules_list = all_rules_list['values'] - # print (json.dumps(all_rules_list, indent=2)) - - rules_file_list = [] - rule_num = 0 - for naming_rule in all_rules_list: - rule_call = rh.config_get( - cluster, - tenant, - "/service/requestNaming/" + str(naming_rule['id']) - ) - if rule_call.status_code == 200: - rule_json = rule_call.json() - if rule_json['enabled'] and ignore_disabled: - rule_json.pop('metadata') - rule_json.pop('id') - rule_file_name = "jsons/request_naming/" + str(rule_num) + ".json" - with open(rule_file_name, 'w') as current_file: - json.dump(rule_json, current_file, indent=2) - rules_file_list.append(rule_file_name) - else: - print (rule_call.status_code) - rule_num = rule_num + 1 - return rules_file_list + """Pull Service Naming Rules to Files""" + all_rules_call = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=ENDPOINT) + all_rules_list = all_rules_call.json() + all_rules_list = all_rules_list['values'] + # print (json.dumps(all_rules_list, indent=2)) + + rules_file_list = [] + rule_num = 0 + for naming_rule in all_rules_list: + rule_call = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=f"{ENDPOINT}/{naming_rule['id']}") + if rule_call.status_code == 200: + rule_json = rule_call.json() + if rule_json['enabled'] and 
ignore_disabled: + rule_json.pop('metadata') + rule_json.pop('id') + rule_file_name = f"jsons/request_naming/{rule_num}.json" + with open(rule_file_name, 'w') as current_file: + json.dump(rule_json, current_file, indent=2) + rules_file_list.append(rule_file_name) + else: + print(rule_call.status_code) + rule_num = rule_num + 1 + return rules_file_list + def push_from_files(file_list, cluster, tenant): - """Push Service Naming Rules from Files""" - #TODO add safeties - for file_name in file_list: - print (file_name) - -def generate_file_list (): - file_list = os.listdir("./jsons/request_naming/") - for f in file_list: - print(str.isdigit(f)) - # print(file_list.sort(key=lambda f: filter(str.isdigit, f))) + """Push Service Naming Rules from Files""" + # TODO add safeties + for file_name in file_list: + print(file_name) + + +def generate_file_list(): + file_list = os.listdir("./jsons/request_naming/") + for f in file_list: + print(str.isdigit(f)) + # print(file_list.sort(key=lambda f: filter(str.isdigit, f))) diff --git a/dynatrace/timeseries/metrics.py b/dynatrace/timeseries/metrics.py new file mode 100644 index 0000000..6ed26fc --- /dev/null +++ b/dynatrace/timeseries/metrics.py @@ -0,0 +1,24 @@ +from dynatrace.requests import request_handler as rh + +ENDPOINT = rh.TenantAPIs.METRICS + + +def get_metrics(cluster, tenant, params=None): + """Gets the list of metrics and their details""" + nextPageKey = 1 + metrics = [] + + while nextPageKey: + # Upon subsequent calls, clear all other params + if nextPageKey != 1: + params = dict(nextPageKey=nextPageKey) + + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=ENDPOINT, + params=params) + + metrics.extend(response.json().get('metrics')) + nextPageKey = response.json().get('nextPageKey') + + return metrics diff --git a/dynatrace/timeseries/timeseries.py b/dynatrace/timeseries/timeseries.py index 804f2b4..be9e8ce 100644 --- a/dynatrace/timeseries/timeseries.py +++ b/dynatrace/timeseries/timeseries.py @@ -2,21 +2,33 @@ ENDPOINT = "timeseries/" + def get_timeseries_list(cluster, tenant, params=None): - """Get List of Timeseries Metics""" - response = rh.env_get(cluster, tenant, ENDPOINT, params=params) - return response.json() - -def get_timeseries_metric (cluster, tenant, metric, params=None): - """Get Timeseries Metric""" - #Chose to do GET, but could also be done as POST. Don't think there are any advantages to post - response = rh.env_get(cluster, tenant, ENDPOINT + metric, params=params) - return response.json() - -def create_custom_metric (cluster, tenant, metric, json, params=None): - response = rh.env_put(cluster, tenant, ENDPOINT + metric, params=params, json=json) - return response.status_code - -def delete_custom_metic (cluster, tenant, metric): - response = rh.env_delete(cluster, tenant, ENDPOINT + metric) - return response.status_code \ No newline at end of file + """Get List of Timeseries Metics""" + response = rh.make_api_call(cluster, tenant, ENDPOINT, params=params) + return response.json() + + +def get_timeseries_metric(cluster, tenant, metric, params=None): + """Get Timeseries Metric""" + # Chose to do GET, but could also be done as POST. 
Don't think there are any advantages to post + response = rh.make_api_call(cluster, tenant, ENDPOINT + metric, params=params) + return response.json() + + +def create_custom_metric(cluster, tenant, metric, json, params=None): + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=f"{ENDPOINT}{metric}", + params=params, + method=rh.HTTP.PUT, + json=json) + return response.status_code + + +def delete_custom_metic(cluster, tenant, metric): + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + method=rh.HTTP.DELETE, + endpoint=f"{ENDPOINT}{metric}") + return response.status_code diff --git a/dynatrace/topology/applications.py b/dynatrace/topology/applications.py index 1581402..201d13c 100644 --- a/dynatrace/topology/applications.py +++ b/dynatrace/topology/applications.py @@ -2,60 +2,81 @@ # Applications needs a seperate definition since the url is not the same (not /infrastructre/) from dynatrace.requests import request_handler as rh -ENDPOINT = "entity/applications/" +ENDPOINT = f"{rh.TenantAPIs.V1_TOPOLOGY}/applications" + def get_applications_tenantwide(cluster, tenant): - """Get Information for all applications in a tenant""" - response = rh.env_get(cluster, tenant, ENDPOINT) - return response.json() + """Get Information for all applications in a tenant""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=ENDPOINT) + return response.json() + def get_application(cluster, tenant, entity): - """Get Information on one application for in a tenant""" - response = rh.env_get(cluster, tenant, ENDPOINT + entity) - return response.json() + """Get Information on one application for in a tenant""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=f"{ENDPOINT}/{entity}") + return response.json() + def set_application_properties(cluster, tenant, entity, prop_json): - """Update properties of application entity""" - response = rh.env_post(cluster, tenant, ENDPOINT + entity, json=prop_json) - return response.json() + """Update properties of application entity""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=f"{ENDPOINT}/{entity}", + method=rh.HTTP.POST, + json=prop_json) + return response.json() + def get_application_count_tenantwide(cluster, tenant): - """Get total count for all applications in a tenant""" - params = { - "relativeTime" : "day", - "includeDetails" : "false" - } + """Get total count for all applications in a tenant""" + params = { + "relativeTime": "day", + "includeDetails": "false" + } + + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=ENDPOINT, + params=params) + env_app_count = len(response.json()) + return env_app_count - response = rh.env_get(cluster, tenant, ENDPOINT, params=params) - env_app_count = len(response.json()) - return env_app_count def get_application_count_clusterwide(cluster): - """Get total count for all applications in cluster""" - cluster_app_count = 0 - for env_key in cluster['tenant']: - cluster_app_count = cluster_app_count + get_application_count_tenantwide( - cluster, - env_key - ) - return cluster_app_count + """Get total count for all applications in cluster""" + cluster_app_count = 0 + for env_key in cluster['tenant']: + cluster_app_count = cluster_app_count \ + + get_application_count_tenantwide(cluster, + env_key) + return cluster_app_count + def get_application_count_setwide(full_set): - full_set_app_count = 0 - for cluster_items in full_set.values(): - full_set_app_count = full_set_app_count + 
get_application_count_clusterwide(cluster_items) - return full_set_app_count - -def add_application_tags (cluster, tenant, entity, tag_list): - """Add tags to application""" - if tag_list is None: - raise Exception ("tag_list cannot be None type") - tag_json = { - 'tags' : tag_list - } - return set_application_properties(cluster, tenant, entity, tag_json) + full_set_app_count = 0 + for cluster_items in full_set.values(): + full_set_app_count = full_set_app_count \ + + get_application_count_clusterwide(cluster_items) + return full_set_app_count + + +def add_application_tags(cluster, tenant, entity, tag_list): + """Add tags to application""" + if tag_list is None: + raise Exception("tag_list cannot be None type") + tag_json = { + 'tags': tag_list + } + return set_application_properties(cluster, tenant, entity, tag_json) + def get_application_baseline(cluster, tenant, entity): - """Get baselines on one application for in a tenant""" - response = rh.env_get(cluster, tenant, ENDPOINT + entity + "/baseline") - return response.json() + """Get baselines on one application for in a tenant""" + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=f"{ENDPOINT}/{entity}/baseline") + return response.json() diff --git a/dynatrace/topology/custom.py b/dynatrace/topology/custom.py index bf5c43e..6f1c59c 100644 --- a/dynatrace/topology/custom.py +++ b/dynatrace/topology/custom.py @@ -1,5 +1,6 @@ import dynatrace.topology.shared as topology_shared + def set_custom_properties(cluster, tenant, entity, prop_json): - """Update properties of process_group entity""" - return topology_shared.set_env_layer_properties(cluster, tenant, 'custom', entity, prop_json) + """Update properties of process_group entity""" + return topology_shared.set_env_layer_properties(cluster, tenant, 'custom', entity, prop_json) diff --git a/dynatrace/topology/hosts.py b/dynatrace/topology/hosts.py index 009fa35..4fd8a24 100644 --- a/dynatrace/topology/hosts.py +++ b/dynatrace/topology/hosts.py @@ -2,43 +2,74 @@ import dynatrace.topology.shared as topology_shared from dynatrace.requests import request_handler as rh + def get_hosts_tenantwide(cluster, tenant, params=None): - """Get Information for all hosts in a tenant""" - return topology_shared.get_env_layer_entities(cluster, tenant, 'hosts', params=params) + """Get Information for all hosts in a tenant""" + return topology_shared.get_env_layer_entities(cluster, tenant, 'hosts', params=params) + def get_host(cluster, tenant, entity, params=None): - """Get Information on one host for in a tenant""" - return topology_shared.get_env_layer_entity(cluster, tenant,'hosts', entity, params=params) + """Get Information on one host for in a tenant""" + return topology_shared.get_env_layer_entity(cluster, tenant, 'hosts', entity, params=params) + def set_host_properties(cluster, tenant, entity, prop_json): - """Update properties of host entity""" - return topology_shared.set_env_layer_properties(cluster, tenant, 'hosts', entity, prop_json) + """Update properties of host entity""" + return topology_shared.set_env_layer_properties(cluster, tenant, 'hosts', entity, prop_json) + def get_host_count_tenantwide(cluster, tenant, params=None): - """Get total count for all hosts in a tenant""" - return topology_shared.get_env_layer_count(cluster, tenant, 'hosts', params=params) + """Get total count for all hosts in a tenant""" + return topology_shared.get_env_layer_count(cluster, tenant, 'hosts', params=params) + def get_host_count_clusterwide(cluster, params=None): - """Get total count 
for all hosts in cluster""" - return topology_shared.get_cluster_layer_count(cluster, 'hosts', params=params) + """Get total count for all hosts in cluster""" + return topology_shared.get_cluster_layer_count(cluster, 'hosts', params=params) + def get_host_count_setwide(full_set, params=None): - """Get total count of hosts for all clusters definied in variable file""" - return topology_shared.get_set_layer_count(full_set, 'hosts', params=params) + """Get total count of hosts for all clusters definied in variable file""" + return topology_shared.get_set_layer_count(full_set, 'hosts', params=params) + + +def add_host_tags(cluster, tenant, entity, tag_list): + """Add tags to host""" + return topology_shared.add_env_layer_tags(cluster, tenant, 'hosts', entity, tag_list) -def add_host_tags (cluster, tenant, entity, tag_list): - """Add tags to host""" - return topology_shared.add_env_layer_tags (cluster, tenant, 'hosts', entity, tag_list) -def delete_host_tag (cluster, tenant, entity, tag): - """Remove single tag from host""" - if tag is None: - raise Exception ("Tag cannot be None!") - return rh.env_delete(cluster, tenant, "entity/infrastructure/hosts/" + entity + "/tags/" + str(tag)) +def delete_host_tag(cluster, tenant, entity, tag): + """Remove single tag from host""" + if tag is None: + raise Exception("Tag cannot be None!") + return rh.make_api_call(cluster=cluster, + tenant=tenant, + method=rh.HTTP.DELETE, + endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/infrastructure/hosts/{entity}/tags/{tag}") + def get_host_units_tenantwide(cluster, tenant, params=None): - consumed_host_units = 0 - host_list = get_hosts_tenantwide (cluster, tenant, params=params) - for host in host_list: - consumed_host_units = consumed_host_units + host['consumedHostUnits'] - return consumed_host_units \ No newline at end of file + consumed_host_units = 0 + host_list = get_hosts_tenantwide(cluster, tenant, params=params) + for host in host_list: + consumed_host_units = consumed_host_units + host['consumedHostUnits'] + return consumed_host_units + + +def get_oneagents_tenantwide(cluster, tenant, params=None): + oneagents = [] + nextPageKey = 1 + + while nextPageKey: + if nextPageKey != 1: + params['nextPageKey'] = nextPageKey + + response = rh.make_api_call(cluster=cluster, + endpoint=rh.TenantAPIs.ONEAGENTS, + tenant=tenant, + params=params) + + oneagents.extend(response.json().get('hosts')) + nextPageKey = response.json().get('nextPageKey') + + return oneagents diff --git a/dynatrace/topology/process.py b/dynatrace/topology/process.py index 7b9aeca..4c2d010 100644 --- a/dynatrace/topology/process.py +++ b/dynatrace/topology/process.py @@ -1,11 +1,12 @@ """Process operations from the Dynatrace API""" import dynatrace.topology.shared as topology_shared -from dynatrace.requests import request_handler as rh + def get_processes_tenantwide(cluster, tenant, params=None): - """Get Information for all processes in a tenant""" - return topology_shared.get_env_layer_entities(cluster, tenant, 'processes', params=params) + """Get Information for all processes in a tenant""" + return topology_shared.get_env_layer_entities(cluster, tenant, 'processes', params=params) + def get_process(cluster, tenant, entity, params=None): - """Get Information on one process for in a tenant""" - return topology_shared.get_env_layer_entity(cluster, tenant,'processes', entity, params=params) \ No newline at end of file + """Get Information on one process for in a tenant""" + return topology_shared.get_env_layer_entity(cluster, tenant, 'processes', entity, 
params=params) diff --git a/dynatrace/topology/process_groups.py b/dynatrace/topology/process_groups.py index 5ff9692..a003b88 100644 --- a/dynatrace/topology/process_groups.py +++ b/dynatrace/topology/process_groups.py @@ -1,31 +1,37 @@ """Process Group operations from the Dynatrace API""" import dynatrace.topology.shared as topology_shared -from dynatrace.requests import request_handler as rh + def get_process_groups_tenantwide(cluster, tenant): - """Get Information for all process-groups in a tenant""" - return topology_shared.get_env_layer_entities(cluster, tenant, 'process-groups') + """Get Information for all process-groups in a tenant""" + return topology_shared.get_env_layer_entities(cluster, tenant, 'process-groups') + def get_process_group(cluster, tenant, entity): - """Get Information on one process-group for in a tenant""" - return topology_shared.get_env_layer_entity(cluster, tenant,'process-groups', entity) + """Get Information on one process-group for in a tenant""" + return topology_shared.get_env_layer_entity(cluster, tenant, 'process-groups', entity) + def set_process_group_properties(cluster, tenant, entity, prop_json): - """Update properties of process-group entity""" - return topology_shared.set_env_layer_properties(cluster, tenant, 'process-groups', entity, prop_json) + """Update properties of process-group entity""" + return topology_shared.set_env_layer_properties(cluster, tenant, 'process-groups', entity, prop_json) + def get_process_group_count_tenantwide(cluster, tenant, params=None): - """Get total count for all process-groups in a tenant""" - return topology_shared.get_env_layer_count(cluster, tenant, 'process-groups', params=params) + """Get total count for all process-groups in a tenant""" + return topology_shared.get_env_layer_count(cluster, tenant, 'process-groups', params=params) + def get_process_group_count_clusterwide(cluster, params=None): - """Get total count for all process-groups in cluster""" - return topology_shared.get_cluster_layer_count(cluster, 'process-groups', params=params) + """Get total count for all process-groups in cluster""" + return topology_shared.get_cluster_layer_count(cluster, 'process-groups', params=params) + def get_process_group_count_setwide(full_set, params=None): - """Get total count of process-groups for all clusters defined in variable file""" - return topology_shared.get_set_layer_count(full_set, 'process-groups', params=params) + """Get total count of process-groups for all clusters defined in variable file""" + return topology_shared.get_set_layer_count(full_set, 'process-groups', params=params) + -def add_process_group_tags (cluster, tenant, entity, tag_list): - """Add tags to a process group""" - return topology_shared.add_env_layer_tags (cluster, tenant, 'process-groups', entity, tag_list) \ No newline at end of file +def add_process_group_tags(cluster, tenant, entity, tag_list): + """Add tags to a process group""" + return topology_shared.add_env_layer_tags(cluster, tenant, 'process-groups', entity, tag_list) diff --git a/dynatrace/topology/services.py b/dynatrace/topology/services.py index 5c4abc4..b0542cf 100644 --- a/dynatrace/topology/services.py +++ b/dynatrace/topology/services.py @@ -1,31 +1,37 @@ """Service operations from the Dynatrace API""" import dynatrace.topology.shared as topology_shared -from dynatrace.requests import request_handler as rh + def get_services_tenantwide(cluster, tenant): - """Get Information for all services in a tenant""" - return topology_shared.get_env_layer_entities(cluster, 
tenant, 'services') + """Get Information for all services in a tenant""" + return topology_shared.get_env_layer_entities(cluster, tenant, 'services') + def get_service(cluster, tenant, entity): - """Get Information on one service for in a tenant""" - return topology_shared.get_env_layer_entity(cluster, tenant,'services', entity) + """Get Information on one service for in a tenant""" + return topology_shared.get_env_layer_entity(cluster, tenant, 'services', entity) + def set_service_properties(cluster, tenant, entity, prop_json): - """Update properties of service entity""" - return topology_shared.set_env_layer_properties(cluster, tenant, 'services', entity, prop_json) + """Update properties of service entity""" + return topology_shared.set_env_layer_properties(cluster, tenant, 'services', entity, prop_json) + def get_service_count_tenantwide(cluster, tenant, params=None): - """Get total count for all services in a tenant""" - return topology_shared.get_env_layer_count(cluster, tenant, 'services', params=params) + """Get total count for all services in a tenant""" + return topology_shared.get_env_layer_count(cluster, tenant, 'services', params=params) + def get_service_count_clusterwide(cluster, params=None): - """Get total count for all services in cluster""" - return topology_shared.get_cluster_layer_count(cluster, 'services', params=params) + """Get total count for all services in cluster""" + return topology_shared.get_cluster_layer_count(cluster, 'services', params=params) + def get_service_count_setwide(full_set, params=None): - """Get total count of services for all clusters definied in variable file""" - return topology_shared.get_set_layer_count(full_set, 'services', params=params) + """Get total count of services for all clusters definied in variable file""" + return topology_shared.get_set_layer_count(full_set, 'services', params=params) + -def add_service_tags (cluster, tenant, entity, tag_list): - """Add tags to a service""" - return topology_shared.add_env_layer_tags (cluster, tenant, 'services', entity, tag_list) \ No newline at end of file +def add_service_tags(cluster, tenant, entity, tag_list): + """Add tags to a service""" + return topology_shared.add_env_layer_tags(cluster, tenant, 'services', entity, tag_list) diff --git a/dynatrace/topology/shared.py b/dynatrace/topology/shared.py index a9498b7..1a18a52 100644 --- a/dynatrace/topology/shared.py +++ b/dynatrace/topology/shared.py @@ -8,93 +8,108 @@ ENDPOINT = "entity/infrastructure/" + def check_valid_layer(layer, layer_list): - """Check if the operation is valid for the layer""" - if layer is None or layer_list is None: - raise Exception ('Provide layer and layer_list!') - if layer not in layer_list: - raise Exception (layer + " layer does not exist or is invalid for this use!") - return + """Check if the operation is valid for the layer""" + if layer is None or layer_list is None: + raise Exception('Provide layer and layer_list!') + if layer not in layer_list: + raise Exception( + layer + " layer does not exist or is invalid for this use!") + return + def get_env_layer_entities(cluster, tenant, layer, params=None): - """Get all Entities of Specified Layer""" - layer_list = ['applications','hosts', 'processes', 'process-groups', 'services'] - check_valid_layer(layer, layer_list) - response = rh.env_get( - cluster, - tenant, - ENDPOINT + layer, - params=params - ) - return response.json() + """Get all Entities of Specified Layer""" + layer_list = ['applications', 'hosts', + 'processes', 'process-groups', 'services'] + 
check_valid_layer(layer, layer_list) + response = rh.make_api_call( + cluster=cluster, + tenant=tenant, + endpoint=f"{rh.Endpoints.V1_TOPOLOGY}/{layer}", + params=params + ) + return response.json() + def get_env_layer_entity(cluster, tenant, layer, entity, params=None): - """Get Entity Information for Specified Layer""" - layer_list = ['applications','hosts', 'processes', 'process-groups', 'services'] - check_valid_layer(layer, layer_list) - response = rh.env_get( - cluster, - tenant, - ENDPOINT + layer + "/" + entity, - params=params - ) - return response.json() + """Get Entity Information for Specified Layer""" + layer_list = ['applications', 'hosts', + 'processes', 'process-groups', 'services'] + check_valid_layer(layer, layer_list) + response = rh.make_api_call( + cluster=cluster, + tenant=tenant, + endpoint=f"{rh.Endpoints.V1_TOPOLOGY}/{layer}/{entity}", + params=params + ) + return response.json() + def set_env_layer_properties(cluster, tenant, layer, entity, prop_json): - """Update Properties of Entity""" - layer_list = ['applications', 'custom', 'hosts', 'process-groups', 'services'] - check_valid_layer(layer, layer_list) - response = rh.env_post( - cluster, - tenant, - ENDPOINT + layer + "/" + entity, - json=prop_json - ) - return response.status_code + """Update Properties of Entity""" + layer_list = ['applications', 'custom', + 'hosts', 'process-groups', 'services'] + check_valid_layer(layer, layer_list) + response = rh.make_api_call( + cluster=cluster, + tenant=tenant, + method=rh.HTTP.POST, + endpoint=f"{rh.Endpoints.V1_TOPOLOGY}/{layer}/{entity}", + json=prop_json + ) + return response.status_code + def get_env_layer_count(cluster, tenant, layer, params=None): - """Get total hosts in an environment""" + """Get total hosts in an environment""" + + layer_list = ['applications', 'hosts', + 'processes', 'process-groups', 'services'] - layer_list = ['applications','hosts', 'processes', 'process-groups', 'services'] - if not params: - params = {} - if 'relativeTime' not in params.keys(): - params['relativeTime'] : "day" - if 'includeDetails' not in params.keys(): - params['includeDetails'] : "false" + if 'relativeTime' not in params.keys(): + params['relativeTime'] = "day" + if 'includeDetails' not in params.keys(): + params['includeDetails'] = False + + check_valid_layer(layer, layer_list) + response = rh.make_api_call(cluster=cluster, + tenant=tenant, + endpoint=f"{rh.Endpoints.V1_TOPOLOGY}/{layer}", + params=params) + env_layer_count = len(response.json()) + return env_layer_count - check_valid_layer(layer, layer_list) - response = rh.env_get(cluster, tenant, ENDPOINT + layer, params=params) - env_layer_count = len(response.json()) - return env_layer_count def get_cluster_layer_count(cluster, layer, params=None): - """Get total count for all environments in cluster""" - cluster_layer_count = 0 - for env_key in cluster['tenant']: - cluster_layer_count = cluster_layer_count + get_env_layer_count( - cluster, - env_key, - layer, - params=params - ) - return cluster_layer_count + """Get total count for all environments in cluster""" + cluster_layer_count = 0 + for env_key in cluster['tenant']: + cluster_layer_count += get_env_layer_count(cluster=cluster, + tenant=env_key, + layer=layer, + params=params) + return cluster_layer_count + def get_set_layer_count(full_set, layer, params=None): - """Get total count for all clusters definied in variable file""" - full_set_layer_count = 0 - for cluster_items in full_set.values(): - full_set_layer_count = \ - full_set_layer_count + \ - 
get_cluster_layer_count(cluster_items, layer, params=params) - return full_set_layer_count - -def add_env_layer_tags (cluster, tenant, layer, entity, tag_list): - layer_list = ['applications','hosts', 'custom', 'process-groups', 'services'] - check_valid_layer(layer, layer_list) - if tag_list is None: - raise Exception ("tag_list cannot be None type") - tag_json = { - 'tags' : tag_list - } - return set_env_layer_properties(cluster, tenant, layer, entity, tag_json) \ No newline at end of file + """Get total count for all clusters definied in variable file""" + full_set_layer_count = 0 + for cluster in full_set.values(): + full_set_layer_count += get_cluster_layer_count(cluster, + layer, + params) + return full_set_layer_count + + +def add_env_layer_tags(cluster, tenant, layer, entity, tag_list): + layer_list = ['applications', 'hosts', + 'custom', 'process-groups', 'services'] + check_valid_layer(layer, layer_list) + if not tag_list: + raise Exception("tag_list cannot be None type") + tag_json = { + 'tags': tag_list + } + return set_env_layer_properties(cluster, tenant, layer, entity, tag_json) From 3316889bba41f7a194c31ab3f947d918b5c98adb Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 28 Jul 2020 19:31:38 -0500 Subject: [PATCH 12/79] PAF-31 #Moving timeseries to tenant pkg --- dynatrace/{timeseries => tenant}/metrics.py | 0 dynatrace/{timeseries => tenant}/timeseries.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename dynatrace/{timeseries => tenant}/metrics.py (100%) rename dynatrace/{timeseries => tenant}/timeseries.py (100%) diff --git a/dynatrace/timeseries/metrics.py b/dynatrace/tenant/metrics.py similarity index 100% rename from dynatrace/timeseries/metrics.py rename to dynatrace/tenant/metrics.py diff --git a/dynatrace/timeseries/timeseries.py b/dynatrace/tenant/timeseries.py similarity index 100% rename from dynatrace/timeseries/timeseries.py rename to dynatrace/tenant/timeseries.py From 04ed021d2766e377c0d177d40267c67a98c71982 Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 28 Jul 2020 19:32:28 -0500 Subject: [PATCH 13/79] PAF-32 #Moving topology into tenant pkg --- dynatrace/tenant/topology/__init__.py | 7 ++ .../{ => tenant}/topology/applications.py | 0 dynatrace/{ => tenant}/topology/custom.py | 2 +- dynatrace/{ => tenant}/topology/hosts.py | 2 +- dynatrace/{ => tenant}/topology/process.py | 2 +- .../{ => tenant}/topology/process_groups.py | 2 +- dynatrace/{ => tenant}/topology/services.py | 2 +- dynatrace/{ => tenant}/topology/shared.py | 73 +++++++++++++------ 8 files changed, 61 insertions(+), 29 deletions(-) create mode 100644 dynatrace/tenant/topology/__init__.py rename dynatrace/{ => tenant}/topology/applications.py (100%) rename dynatrace/{ => tenant}/topology/custom.py (78%) rename dynatrace/{ => tenant}/topology/hosts.py (97%) rename dynatrace/{ => tenant}/topology/process.py (88%) rename dynatrace/{ => tenant}/topology/process_groups.py (96%) rename dynatrace/{ => tenant}/topology/services.py (96%) rename dynatrace/{ => tenant}/topology/shared.py (59%) diff --git a/dynatrace/tenant/topology/__init__.py b/dynatrace/tenant/topology/__init__.py new file mode 100644 index 0000000..9ef4134 --- /dev/null +++ b/dynatrace/tenant/topology/__init__.py @@ -0,0 +1,7 @@ +from dynatrace.tenant.topology.applications import * +from dynatrace.tenant.topology.custom import * +from dynatrace.tenant.topology.hosts import * +from dynatrace.tenant.topology.process import * +from dynatrace.tenant.topology.process_groups import * +from dynatrace.tenant.topology.services import * 
+from dynatrace.tenant.topology.shared import * \ No newline at end of file diff --git a/dynatrace/topology/applications.py b/dynatrace/tenant/topology/applications.py similarity index 100% rename from dynatrace/topology/applications.py rename to dynatrace/tenant/topology/applications.py diff --git a/dynatrace/topology/custom.py b/dynatrace/tenant/topology/custom.py similarity index 78% rename from dynatrace/topology/custom.py rename to dynatrace/tenant/topology/custom.py index 6f1c59c..658f62f 100644 --- a/dynatrace/topology/custom.py +++ b/dynatrace/tenant/topology/custom.py @@ -1,4 +1,4 @@ -import dynatrace.topology.shared as topology_shared +import dynatrace.tenant.topology.shared as topology_shared def set_custom_properties(cluster, tenant, entity, prop_json): diff --git a/dynatrace/topology/hosts.py b/dynatrace/tenant/topology/hosts.py similarity index 97% rename from dynatrace/topology/hosts.py rename to dynatrace/tenant/topology/hosts.py index 4fd8a24..a0b9eae 100644 --- a/dynatrace/topology/hosts.py +++ b/dynatrace/tenant/topology/hosts.py @@ -1,5 +1,5 @@ """Host operations from the Dynatrace API""" -import dynatrace.topology.shared as topology_shared +import dynatrace.tenant.topology.shared as topology_shared from dynatrace.requests import request_handler as rh diff --git a/dynatrace/topology/process.py b/dynatrace/tenant/topology/process.py similarity index 88% rename from dynatrace/topology/process.py rename to dynatrace/tenant/topology/process.py index 4c2d010..fb6c523 100644 --- a/dynatrace/topology/process.py +++ b/dynatrace/tenant/topology/process.py @@ -1,5 +1,5 @@ """Process operations from the Dynatrace API""" -import dynatrace.topology.shared as topology_shared +import dynatrace.tenant.topology.shared as topology_shared def get_processes_tenantwide(cluster, tenant, params=None): diff --git a/dynatrace/topology/process_groups.py b/dynatrace/tenant/topology/process_groups.py similarity index 96% rename from dynatrace/topology/process_groups.py rename to dynatrace/tenant/topology/process_groups.py index a003b88..e45d912 100644 --- a/dynatrace/topology/process_groups.py +++ b/dynatrace/tenant/topology/process_groups.py @@ -1,5 +1,5 @@ """Process Group operations from the Dynatrace API""" -import dynatrace.topology.shared as topology_shared +import dynatrace.tenant.topology.shared as topology_shared def get_process_groups_tenantwide(cluster, tenant): diff --git a/dynatrace/topology/services.py b/dynatrace/tenant/topology/services.py similarity index 96% rename from dynatrace/topology/services.py rename to dynatrace/tenant/topology/services.py index b0542cf..6b4fe58 100644 --- a/dynatrace/topology/services.py +++ b/dynatrace/tenant/topology/services.py @@ -1,5 +1,5 @@ """Service operations from the Dynatrace API""" -import dynatrace.topology.shared as topology_shared +import dynatrace.tenant.topology.shared as topology_shared def get_services_tenantwide(cluster, tenant): diff --git a/dynatrace/topology/shared.py b/dynatrace/tenant/topology/shared.py similarity index 59% rename from dynatrace/topology/shared.py rename to dynatrace/tenant/topology/shared.py index 1a18a52..e88d8a2 100644 --- a/dynatrace/topology/shared.py +++ b/dynatrace/tenant/topology/shared.py @@ -9,11 +9,11 @@ ENDPOINT = "entity/infrastructure/" -def check_valid_layer(layer, layer_list): +def check_valid_layer(layer, layer_dict): """Check if the operation is valid for the layer""" - if layer is None or layer_list is None: - raise Exception('Provide layer and layer_list!') - if layer not in layer_list: + if 
layer is None or layer_dict is None: + raise Exception('Provide layer and layer_dict!') + if layer not in layer_dict: raise Exception( layer + " layer does not exist or is invalid for this use!") return @@ -21,13 +21,18 @@ def check_valid_layer(layer, layer_list): def get_env_layer_entities(cluster, tenant, layer, params=None): """Get all Entities of Specified Layer""" - layer_list = ['applications', 'hosts', - 'processes', 'process-groups', 'services'] - check_valid_layer(layer, layer_list) + layer_dict = { + 'applications': 'applications', + 'hosts': "infrastructure/hosts", + 'processes': "infrastructure/processes", + 'process-groups': "infrastructure/process-groups", + 'services': "infrastructure/services" + } + check_valid_layer(layer, layer_dict) response = rh.make_api_call( cluster=cluster, tenant=tenant, - endpoint=f"{rh.Endpoints.V1_TOPOLOGY}/{layer}", + endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{layer_dict[layer]}", params=params ) return response.json() @@ -35,13 +40,18 @@ def get_env_layer_entities(cluster, tenant, layer, params=None): def get_env_layer_entity(cluster, tenant, layer, entity, params=None): """Get Entity Information for Specified Layer""" - layer_list = ['applications', 'hosts', - 'processes', 'process-groups', 'services'] - check_valid_layer(layer, layer_list) + layer_dict = { + 'applications': 'applications', + 'hosts': "infrastructure/hosts", + 'processes': "infrastructure/processes", + 'process-groups': "infrastructure/process-groups", + 'services': "infrastructure/services" + } + check_valid_layer(layer, layer_dict) response = rh.make_api_call( cluster=cluster, tenant=tenant, - endpoint=f"{rh.Endpoints.V1_TOPOLOGY}/{layer}/{entity}", + endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{layer_dict[layer]}/{entity}", params=params ) return response.json() @@ -49,14 +59,19 @@ def get_env_layer_entity(cluster, tenant, layer, entity, params=None): def set_env_layer_properties(cluster, tenant, layer, entity, prop_json): """Update Properties of Entity""" - layer_list = ['applications', 'custom', - 'hosts', 'process-groups', 'services'] - check_valid_layer(layer, layer_list) + layer_dict = { + 'applications': 'applications', + 'custom': "infrastructure/custom", + 'hosts': "infrastructure/hosts", + 'process-groups': "infrastructure/process-groups", + 'services': "infrastructure/services" + } + check_valid_layer(layer, layer_dict) response = rh.make_api_call( cluster=cluster, tenant=tenant, method=rh.HTTP.POST, - endpoint=f"{rh.Endpoints.V1_TOPOLOGY}/{layer}/{entity}", + endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{layer_dict[layer]}/{entity}", json=prop_json ) return response.status_code @@ -64,19 +79,23 @@ def set_env_layer_properties(cluster, tenant, layer, entity, prop_json): def get_env_layer_count(cluster, tenant, layer, params=None): """Get total hosts in an environment""" - - layer_list = ['applications', 'hosts', - 'processes', 'process-groups', 'services'] + layer_dict = { + 'applications': 'applications', + 'hosts': "infrastructure/hosts", + 'processes': "infrastructure/processes", + 'process-groups': "infrastructure/process-groups", + 'services': "infrastructure/services" + } if 'relativeTime' not in params.keys(): params['relativeTime'] = "day" if 'includeDetails' not in params.keys(): params['includeDetails'] = False - check_valid_layer(layer, layer_list) + check_valid_layer(layer, layer_dict) response = rh.make_api_call(cluster=cluster, tenant=tenant, - endpoint=f"{rh.Endpoints.V1_TOPOLOGY}/{layer}", + endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{layer_dict[layer]}", 
params=params) env_layer_count = len(response.json()) return env_layer_count @@ -104,9 +123,15 @@ def get_set_layer_count(full_set, layer, params=None): def add_env_layer_tags(cluster, tenant, layer, entity, tag_list): - layer_list = ['applications', 'hosts', - 'custom', 'process-groups', 'services'] - check_valid_layer(layer, layer_list) + layer_dict = { + 'applications': 'applications', + 'hosts': "infrastructure/hosts", + 'custom': "infrastructure/custom", + 'process-groups': "infrastructure/process-groups", + 'services': "infrastructure/services" + } + + check_valid_layer(layer, layer_dict) if not tag_list: raise Exception("tag_list cannot be None type") tag_json = { From 093ea40a535dfb57d13bb918ad83bb9cce74b8ad Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 28 Jul 2020 19:38:51 -0500 Subject: [PATCH 14/79] PAF-32 #Adding a newline --- dynatrace/tenant/topology/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dynatrace/tenant/topology/__init__.py b/dynatrace/tenant/topology/__init__.py index 9ef4134..17f1f19 100644 --- a/dynatrace/tenant/topology/__init__.py +++ b/dynatrace/tenant/topology/__init__.py @@ -4,4 +4,4 @@ from dynatrace.tenant.topology.process import * from dynatrace.tenant.topology.process_groups import * from dynatrace.tenant.topology.services import * -from dynatrace.tenant.topology.shared import * \ No newline at end of file +from dynatrace.tenant.topology.shared import * From a008803b2339e6328b208c3870cf67e1e1bd9bb4 Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 28 Jul 2020 19:55:53 -0500 Subject: [PATCH 15/79] PAF-31 #Desmelling variable names --- dynatrace/tenant/metrics.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/dynatrace/tenant/metrics.py b/dynatrace/tenant/metrics.py index 6ed26fc..ed78db9 100644 --- a/dynatrace/tenant/metrics.py +++ b/dynatrace/tenant/metrics.py @@ -5,13 +5,13 @@ def get_metrics(cluster, tenant, params=None): """Gets the list of metrics and their details""" - nextPageKey = 1 + next_page_key = 1 metrics = [] - while nextPageKey: + while next_page_key: # Upon subsequent calls, clear all other params - if nextPageKey != 1: - params = dict(nextPageKey=nextPageKey) + if next_page_key != 1: + params = dict(nextPageKey=next_page_key) response = rh.make_api_call(cluster=cluster, tenant=tenant, @@ -19,6 +19,6 @@ def get_metrics(cluster, tenant, params=None): params=params) metrics.extend(response.json().get('metrics')) - nextPageKey = response.json().get('nextPageKey') + next_page_key = response.json().get('nextPageKey') return metrics From d79b191cf950541f276219fd1312827eb9a90376 Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 28 Jul 2020 19:59:16 -0500 Subject: [PATCH 16/79] PAF-32 #Redoing layer_list & adding endpoint const --- dynatrace/tenant/topology/shared.py | 85 +++++++++++------------------ 1 file changed, 33 insertions(+), 52 deletions(-) diff --git a/dynatrace/tenant/topology/shared.py b/dynatrace/tenant/topology/shared.py index e88d8a2..cbd58f1 100644 --- a/dynatrace/tenant/topology/shared.py +++ b/dynatrace/tenant/topology/shared.py @@ -6,33 +6,34 @@ # 2. Get specific entity - application, host process, process group, service # 3. 
Update properties of entity - application, custom, host, process group, service -ENDPOINT = "entity/infrastructure/" +ENDPOINT_SUFFIX = { + 'applications': 'applications', + 'custom': "infrastructure/custom", + 'hosts': "infrastructure/hosts", + 'processes': "infrastructure/processes", + 'process-groups': "infrastructure/process-groups", + 'services': "infrastructure/services" +} -def check_valid_layer(layer, layer_dict): +def check_valid_layer(layer, layer_list): """Check if the operation is valid for the layer""" - if layer is None or layer_dict is None: - raise Exception('Provide layer and layer_dict!') - if layer not in layer_dict: + if layer is None or layer_list is None: + raise Exception('Provide layer and layer_list!') + if layer not in layer_list: raise Exception( layer + " layer does not exist or is invalid for this use!") - return def get_env_layer_entities(cluster, tenant, layer, params=None): """Get all Entities of Specified Layer""" - layer_dict = { - 'applications': 'applications', - 'hosts': "infrastructure/hosts", - 'processes': "infrastructure/processes", - 'process-groups': "infrastructure/process-groups", - 'services': "infrastructure/services" - } - check_valid_layer(layer, layer_dict) + layer_list = ['applications', 'hosts', + 'processes', 'process-groups', 'services'] + check_valid_layer(layer, layer_list) response = rh.make_api_call( cluster=cluster, tenant=tenant, - endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{layer_dict[layer]}", + endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{ENDPOINT_SUFFIX[layer]}", params=params ) return response.json() @@ -40,18 +41,13 @@ def get_env_layer_entities(cluster, tenant, layer, params=None): def get_env_layer_entity(cluster, tenant, layer, entity, params=None): """Get Entity Information for Specified Layer""" - layer_dict = { - 'applications': 'applications', - 'hosts': "infrastructure/hosts", - 'processes': "infrastructure/processes", - 'process-groups': "infrastructure/process-groups", - 'services': "infrastructure/services" - } - check_valid_layer(layer, layer_dict) + layer_list = ['applications', 'hosts', + 'processes', 'process-groups', 'services'] + check_valid_layer(layer, layer_list) response = rh.make_api_call( cluster=cluster, tenant=tenant, - endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{layer_dict[layer]}/{entity}", + endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{ENDPOINT_SUFFIX[layer]}/{entity}", params=params ) return response.json() @@ -59,19 +55,14 @@ def get_env_layer_entity(cluster, tenant, layer, entity, params=None): def set_env_layer_properties(cluster, tenant, layer, entity, prop_json): """Update Properties of Entity""" - layer_dict = { - 'applications': 'applications', - 'custom': "infrastructure/custom", - 'hosts': "infrastructure/hosts", - 'process-groups': "infrastructure/process-groups", - 'services': "infrastructure/services" - } - check_valid_layer(layer, layer_dict) + layer_list = ['applications', 'custom', + 'hosts', 'process-groups', 'services'] + check_valid_layer(layer, layer_list) response = rh.make_api_call( cluster=cluster, tenant=tenant, method=rh.HTTP.POST, - endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{layer_dict[layer]}/{entity}", + endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{ENDPOINT_SUFFIX[layer]}/{entity}", json=prop_json ) return response.status_code @@ -79,23 +70,19 @@ def set_env_layer_properties(cluster, tenant, layer, entity, prop_json): def get_env_layer_count(cluster, tenant, layer, params=None): """Get total hosts in an environment""" - layer_dict = { - 'applications': 'applications', - 'hosts': 
"infrastructure/hosts", - 'processes': "infrastructure/processes", - 'process-groups': "infrastructure/process-groups", - 'services': "infrastructure/services" - } + + layer_list = ['applications', 'hosts', + 'processes', 'process-groups', 'services'] if 'relativeTime' not in params.keys(): params['relativeTime'] = "day" if 'includeDetails' not in params.keys(): params['includeDetails'] = False - check_valid_layer(layer, layer_dict) + check_valid_layer(layer, layer_list) response = rh.make_api_call(cluster=cluster, tenant=tenant, - endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{layer_dict[layer]}", + endpoint=f"{rh.TenantAPIs.V1_TOPOLOGY}/{ENDPOINT_SUFFIX[layer]}", params=params) env_layer_count = len(response.json()) return env_layer_count @@ -123,18 +110,12 @@ def get_set_layer_count(full_set, layer, params=None): def add_env_layer_tags(cluster, tenant, layer, entity, tag_list): - layer_dict = { - 'applications': 'applications', - 'hosts': "infrastructure/hosts", - 'custom': "infrastructure/custom", - 'process-groups': "infrastructure/process-groups", - 'services': "infrastructure/services" - } - - check_valid_layer(layer, layer_dict) + layer_list = ['applications', 'hosts', + 'custom', 'process-groups', 'services'] + check_valid_layer(layer, layer_list) if not tag_list: raise Exception("tag_list cannot be None type") tag_json = { 'tags': tag_list } - return set_env_layer_properties(cluster, tenant, layer, entity, tag_json) + return set_env_layer_properties(cluster, tenant, layer, entity, tag_json) \ No newline at end of file From bc0ed34256afcb61ef15771352a0e00633dcf0d7 Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 28 Jul 2020 20:02:35 -0500 Subject: [PATCH 17/79] PAF-32 #Desmelling Variable Names --- dynatrace/tenant/topology/hosts.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/dynatrace/tenant/topology/hosts.py b/dynatrace/tenant/topology/hosts.py index a0b9eae..d05d7f9 100644 --- a/dynatrace/tenant/topology/hosts.py +++ b/dynatrace/tenant/topology/hosts.py @@ -58,11 +58,11 @@ def get_host_units_tenantwide(cluster, tenant, params=None): def get_oneagents_tenantwide(cluster, tenant, params=None): oneagents = [] - nextPageKey = 1 + next_page_key = 1 - while nextPageKey: - if nextPageKey != 1: - params['nextPageKey'] = nextPageKey + while next_page_key: + if next_page_key != 1: + params['nextPageKey'] = next_page_key response = rh.make_api_call(cluster=cluster, endpoint=rh.TenantAPIs.ONEAGENTS, @@ -70,6 +70,6 @@ def get_oneagents_tenantwide(cluster, tenant, params=None): params=params) oneagents.extend(response.json().get('hosts')) - nextPageKey = response.json().get('nextPageKey') + next_page_key = response.json().get('nextPageKey') return oneagents From ef560aa0f78641f23a384926fcafc21213aa8049 Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 28 Jul 2020 20:14:25 -0500 Subject: [PATCH 18/79] PAF-19 #Refactoring hosts exceptions to b specific --- dynatrace/tenant/topology/hosts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dynatrace/tenant/topology/hosts.py b/dynatrace/tenant/topology/hosts.py index d05d7f9..fbbee55 100644 --- a/dynatrace/tenant/topology/hosts.py +++ b/dynatrace/tenant/topology/hosts.py @@ -41,7 +41,7 @@ def add_host_tags(cluster, tenant, entity, tag_list): def delete_host_tag(cluster, tenant, entity, tag): """Remove single tag from host""" if tag is None: - raise Exception("Tag cannot be None!") + raise ValueError("Tag cannot be None!") return rh.make_api_call(cluster=cluster, tenant=tenant, method=rh.HTTP.DELETE, From 
612163c551aa876795e916b172d99bebd9d1f492 Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 28 Jul 2020 20:19:20 -0500 Subject: [PATCH 19/79] PAF-19 #Making topology exceptions more accurate --- dynatrace/tenant/topology/hosts.py | 2 +- dynatrace/tenant/topology/shared.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dynatrace/tenant/topology/hosts.py b/dynatrace/tenant/topology/hosts.py index fbbee55..822725c 100644 --- a/dynatrace/tenant/topology/hosts.py +++ b/dynatrace/tenant/topology/hosts.py @@ -41,7 +41,7 @@ def add_host_tags(cluster, tenant, entity, tag_list): def delete_host_tag(cluster, tenant, entity, tag): """Remove single tag from host""" if tag is None: - raise ValueError("Tag cannot be None!") + raise TypeError("Tag cannot be None!") return rh.make_api_call(cluster=cluster, tenant=tenant, method=rh.HTTP.DELETE, diff --git a/dynatrace/tenant/topology/shared.py b/dynatrace/tenant/topology/shared.py index cbd58f1..be242d9 100644 --- a/dynatrace/tenant/topology/shared.py +++ b/dynatrace/tenant/topology/shared.py @@ -19,9 +19,9 @@ def check_valid_layer(layer, layer_list): """Check if the operation is valid for the layer""" if layer is None or layer_list is None: - raise Exception('Provide layer and layer_list!') + raise TypeError('Provide layer and layer_list!') if layer not in layer_list: - raise Exception( + raise ValueError( layer + " layer does not exist or is invalid for this use!") @@ -114,7 +114,7 @@ def add_env_layer_tags(cluster, tenant, layer, entity, tag_list): 'custom', 'process-groups', 'services'] check_valid_layer(layer, layer_list) if not tag_list: - raise Exception("tag_list cannot be None type") + raise TypeError("tag_list cannot be None type") tag_json = { 'tags': tag_list } From bcb3d80f30e4251e1cdc939938c709f829c305f6 Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 28 Jul 2020 20:22:05 -0500 Subject: [PATCH 20/79] PAF-19 #Desmelling Applications excetion [skip ci] --- dynatrace/tenant/topology/applications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dynatrace/tenant/topology/applications.py b/dynatrace/tenant/topology/applications.py index 201d13c..083ee34 100644 --- a/dynatrace/tenant/topology/applications.py +++ b/dynatrace/tenant/topology/applications.py @@ -67,7 +67,7 @@ def get_application_count_setwide(full_set): def add_application_tags(cluster, tenant, entity, tag_list): """Add tags to application""" if tag_list is None: - raise Exception("tag_list cannot be None type") + raise TypeError("tag_list cannot be None type") tag_json = { 'tags': tag_list } From 301f9652dae3c284ef14889471efab9b7bf28007 Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 29 Jul 2020 11:58:43 -0500 Subject: [PATCH 21/79] PAF-33 Fixing test tooling's insecure SSL suppress --- tests/tooling_for_test.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/tests/tooling_for_test.py b/tests/tooling_for_test.py index 5126a41..0575acf 100644 --- a/tests/tooling_for_test.py +++ b/tests/tooling_for_test.py @@ -1,10 +1,11 @@ """Mockserver Expectation Setup""" import requests import json -from dynatrace.requests.request_handler import generate_tenant_url, no_ssl_verification +from dynatrace.requests.request_handler import generate_tenant_url def create_mockserver_expectation(cluster, tenant, url_path, request_type, parameters, response_payload_file=None, mock_id=None): + requests.packages.urllib3.disable_warnings() expectation = { "httpRequest": { "queryStringParameters": { @@ -37,14 +38,17 @@ def 
create_mockserver_expectation(cluster, tenant, url_path, request_type, param expectation_url = generate_tenant_url( cluster, tenant) + "/mockserver/expectation" - with no_ssl_verification(): - test_req = requests.request( - "PUT", expectation_url, json=expectation, verify=False) - if test_req.status_code > 300: - print(expectation, test_req.status_code, test_req.text, end="\n") - raise ValueError(test_req.status_code) + test_req = requests.request( + "PUT", + expectation_url, + json=expectation, + verify=False + ) + if test_req.status_code > 300: + print(expectation, test_req.status_code, test_req.text, end="\n") + raise ValueError(test_req.status_code) def expected_payload(json_file): with open(json_file) as f: - return json.load(f) \ No newline at end of file + return json.load(f) From 30cbefa81bd521325e0f994805d2c6e5b7c38c25 Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 29 Jul 2020 11:59:20 -0500 Subject: [PATCH 22/79] fixing host_groups import since topology moved --- dynatrace/tenant/host_groups.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dynatrace/tenant/host_groups.py b/dynatrace/tenant/host_groups.py index 8416817..495b012 100644 --- a/dynatrace/tenant/host_groups.py +++ b/dynatrace/tenant/host_groups.py @@ -1,5 +1,5 @@ """Host Group Information for Tenant""" -from dynatrace.topology import hosts as topology_hosts +from dynatrace.tenant.topology import hosts as topology_hosts # TODO redo export function (break out to export function?) # def export_host_groups_setwide(full_set): From adb5ce995b40d1d4d45be7236a9ac3e33246623b Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 29 Jul 2020 13:20:55 -0500 Subject: [PATCH 23/79] PAF-19 #Creating file for all project exceptions --- dynatrace/exceptions.py | 18 ++++++++++++++++++ dynatrace/requests/request_handler.py | 23 ++++++++++++++++++++--- dynatrace/tenant/maintenance.py | 20 ++++---------------- 3 files changed, 42 insertions(+), 19 deletions(-) create mode 100644 dynatrace/exceptions.py diff --git a/dynatrace/exceptions.py b/dynatrace/exceptions.py new file mode 100644 index 0000000..88b3bda --- /dev/null +++ b/dynatrace/exceptions.py @@ -0,0 +1,18 @@ +''' +Module containing all the custom exceptions for this project +''' +from sys import stderr + +class InvalidAPIResponseException (Exception): + def __init__ (self, message): + print(message, file=stderr) + +class InvalidDateFormatException(ValueError): + def __init__(self, required_format): + self.required_format = required_format + print("Incorrect Date for following entry: %s", required_format, file=stderr) + +class InvalidScopeException(ValueError): + def __init__(self, required_format): + self.required_format = required_format + print("Invalid scope used. Tag required for management zone, matching rule: %s", required_format, file=stderr) diff --git a/dynatrace/requests/request_handler.py b/dynatrace/requests/request_handler.py index 7cf9a21..c2fa503 100644 --- a/dynatrace/requests/request_handler.py +++ b/dynatrace/requests/request_handler.py @@ -1,6 +1,7 @@ """Make API Request to available Dynatrace API""" import requests import time +from dynatrace.exceptions import InvalidAPIResponseException from enum import Enum, auto requests.packages.urllib3.disable_warnings() @@ -80,6 +81,18 @@ def __repr__(self): def make_api_call(cluster, endpoint, tenant=None, params=None, json=None, method=HTTP.GET): + ''' + Function makes an API call in a safe way, taking into account the rate limits. 
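+    (Rate limiting is detected in check_response via the HTTP 429 status code and the rate-limit headers on the response.)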
+ This will ensure the API call will always go through, with the program waiting for the limit to reset if needed.\n + + @param cluster - Cluster dictionary from variable_set\n + @param endpoint - API endpoint to call.\n + @param tenant - String of tenant name used in cluster dictionary\n + @param json - dictionary to be converted to JSON request\n + @param method - HTTP method to use in call. Use HTTP enum.\n + \n + @return - response from request\n + ''' # Set the right URL for the operation url = f"{generate_tenant_url(cluster, tenant)}{endpoint}" if tenant else cluster['url'] @@ -119,7 +132,12 @@ def make_api_call(cluster, endpoint, tenant=None, params=None, json=None, method def check_response(response): - """Checks if the Reponse has a Successful Status Code""" + ''' + Checks if the Response has a Successful Status Code + + @param response - The response variable returned from a request\n + + ''' headers = response.headers if response.status_code == 429: @@ -134,8 +152,7 @@ def check_response(response): time.sleep(float(time_to_wait)) return False elif not 200 <= response.status_code <= 299: - raise Exception(f"Response Error\n{response.url}\n" - f"{response.status_code}\n{response.text}") + raise InvalidAPIResponseException(f"Response Error:\n{response.url}\n{response.status_code}\n{response.text}") return True diff --git a/dynatrace/tenant/maintenance.py b/dynatrace/tenant/maintenance.py index 0e90691..dd73764 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -3,23 +3,11 @@ import re import dynatrace.requests.request_handler as rh import user_variables as uv +from dynatrace.exceptions import InvalidDateFormatException MZ_ENDPOINT = rh.TenantAPIs.MAINTENANCE_WINDOWS - -class InvalidDateFormatException(ValueError): - def __init__(self, required_format): - self.required_format = required_format - print("Incorrect Date for following entry: %s", required_format) - - -class InvalidScopeException(ValueError): - def __init__(self, required_format): - self.required_format = required_format - print("Invalid scope used. Tag required for management zone, matching rule: %s", required_format) - - def validate_datetime(datetime_text, required_format): try: datetime.datetime.strptime(datetime_text, required_format) @@ -73,7 +61,7 @@ def generate_schedule(recurrence_type, start_time, duration, range_start, range_ # Check Recurrence if recurrence_type not in types_available: - raise Exception( + raise ValueError( "Invalid Recurrence Type! Allowed values are: ONCE, DAILY, WEEKLY, MONTHLY") # Check ranges @@ -110,7 +98,7 @@ def generate_schedule(recurrence_type, start_time, duration, range_start, range_ if day in days_of_week: schedule['recurrence']['dayOfWeek'] = day else: - raise Exception("Invalid Weekly Day! Allowed values are " + raise ValueError("Invalid Weekly Day! Allowed values are " + "SUNDAY, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY") # Check Monthly Day @@ -118,7 +106,7 @@ def generate_schedule(recurrence_type, start_time, duration, range_start, range_ if (1 <= int(day) <= 31): schedule['recurrence']['dayOfMonth'] = day else: - raise Exception("Invalid Monthly Day! Allowed values are 1-31") + raise ValueError("Invalid Monthly Day! 
Allowed values are 1-31") return schedule From cb115b205181f9ef3e7f7e7147f0407a737a27b5 Mon Sep 17 00:00:00 2001 From: Aaron Date: Wed, 29 Jul 2020 13:49:56 -0500 Subject: [PATCH 24/79] PAF-19 Adding Exception for Managed-Only --- dynatrace/cluster/users.py | 3 ++- dynatrace/exceptions.py | 4 ++++ dynatrace/requests/request_handler.py | 11 ++++++----- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/dynatrace/cluster/users.py b/dynatrace/cluster/users.py index 6922136..2331503 100644 --- a/dynatrace/cluster/users.py +++ b/dynatrace/cluster/users.py @@ -1,5 +1,6 @@ """User Operations in Cluster Mangement""" import dynatrace.requests.request_handler as rh +from dynatrace.exceptions import ManagedClusterOnlyException # TODO add check for is_managed @@ -7,7 +8,7 @@ def check_is_managed(cluster, ignore_saas): """Checks if the cluster is Managed""" if not cluster['is_managed'] and not ignore_saas: - raise Exception('Cannot run operation on SaaS instances!') + raise ManagedClusterOnlyException() return cluster['is_managed'] diff --git a/dynatrace/exceptions.py b/dynatrace/exceptions.py index 88b3bda..b92a074 100644 --- a/dynatrace/exceptions.py +++ b/dynatrace/exceptions.py @@ -16,3 +16,7 @@ class InvalidScopeException(ValueError): def __init__(self, required_format): self.required_format = required_format print("Invalid scope used. Tag required for management zone, matching rule: %s", required_format, file=stderr) + +class ManagedClusterOnlyException(TypeError): + def __init__(self): + print ("This operation is only supported on Dynatrace Managed!", file=stderr) diff --git a/dynatrace/requests/request_handler.py b/dynatrace/requests/request_handler.py index c2fa503..b2e303b 100644 --- a/dynatrace/requests/request_handler.py +++ b/dynatrace/requests/request_handler.py @@ -1,7 +1,7 @@ """Make API Request to available Dynatrace API""" import requests import time -from dynatrace.exceptions import InvalidAPIResponseException +from dynatrace.exceptions import InvalidAPIResponseException, ManagedClusterOnlyException from enum import Enum, auto requests.packages.urllib3.disable_warnings() @@ -94,13 +94,14 @@ def make_api_call(cluster, endpoint, tenant=None, params=None, json=None, method @return - response from request\n ''' # Set the right URL for the operation - url = f"{generate_tenant_url(cluster, tenant)}{endpoint}" if tenant else cluster['url'] + url = f"{generate_tenant_url(cluster, tenant)}{endpoint}" if tenant else f"{HTTPS_STR}{cluster['url']}" if not params: params = {} # Get correct token for the operation if 'onpremise' in str(endpoint) or 'cluster' in str(endpoint): + check_managed (cluster) params['Api-Token'] = cluster['cluster_token'] else: params['Api-Token'] = cluster['api_token'][tenant] @@ -157,10 +158,10 @@ def check_response(response): return True -def check_managed(managed_bool): +def check_managed(cluster): """Checks if the Cluster Operation is valid (Managed) for the current cluster""" - if not managed_bool: - raise Exception("Cluster Operations not supported for SaaS!") + if not cluster['is_managed']: + raise ManagedClusterOnlyException() def generate_tenant_url(cluster, tenant): From 012919ab3196c75a1ef0c66ceeb4d7592f5ba4b0 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 3 Aug 2020 00:41:49 -0500 Subject: [PATCH 25/79] #PAF -21 Adding MWindow & moving Host Group JSONs --- ...oup_1.json => mock_hostgroup_response_1.json} | 0 ...ck_maintenance_create_once_expectation_1.json | 16 ++++++++++++++++ .../mock_maintenance_create_once_response_1.json | 5 +++++ 
tests/test_host_groups.py | 5 ++--- 4 files changed, 23 insertions(+), 3 deletions(-) rename tests/mockserver_expectations/{mock_hostgroup_1.json => mock_hostgroup_response_1.json} (100%) create mode 100644 tests/mockserver_expectations/mock_maintenance_create_once_expectation_1.json create mode 100644 tests/mockserver_expectations/mock_maintenance_create_once_response_1.json diff --git a/tests/mockserver_expectations/mock_hostgroup_1.json b/tests/mockserver_expectations/mock_hostgroup_response_1.json similarity index 100% rename from tests/mockserver_expectations/mock_hostgroup_1.json rename to tests/mockserver_expectations/mock_hostgroup_response_1.json diff --git a/tests/mockserver_expectations/mock_maintenance_create_once_expectation_1.json b/tests/mockserver_expectations/mock_maintenance_create_once_expectation_1.json new file mode 100644 index 0000000..d740f54 --- /dev/null +++ b/tests/mockserver_expectations/mock_maintenance_create_once_expectation_1.json @@ -0,0 +1,16 @@ +{ + "name":"Test Payload Daily", + "description":"Generating Payload for Test", + "suppression":"DETECT_PROBLEMS_AND_ALERT", + "schedule":{ + "recurrenceType":"DAILY", + "start":"2020-01-01 00:00", + "end":"2020-01-02 00:00", + "zoneId":"America/Chicago", + "recurrence":{ + "startTime":"23:00", + "durationMinutes":60 + } + }, + "type":"PLANNED" + } \ No newline at end of file diff --git a/tests/mockserver_expectations/mock_maintenance_create_once_response_1.json b/tests/mockserver_expectations/mock_maintenance_create_once_response_1.json new file mode 100644 index 0000000..de86d44 --- /dev/null +++ b/tests/mockserver_expectations/mock_maintenance_create_once_response_1.json @@ -0,0 +1,5 @@ +{ + "id": "1a000000-200a-3000-4000-5abc00000000", + "name": "Test Payload", + "description": "Generating Payload for Test" +} \ No newline at end of file diff --git a/tests/test_host_groups.py b/tests/test_host_groups.py index ca39ea9..bfcdb5b 100644 --- a/tests/test_host_groups.py +++ b/tests/test_host_groups.py @@ -1,6 +1,5 @@ """Testing dynatrace.tenant.host_groups""" import unittest -import json import user_variables from tests import tooling_for_test from dynatrace.tenant import host_groups @@ -16,9 +15,9 @@ def test_get_host_groups_tenantwide(self): "includeDetails": [ "true" ], "Api-Token": [CLUSTER["api_token"][TENANT]], } - mockserver_expectation_file = "tests/mockserver_expectations/mock_hostgroup_1.json" + mockserver_expectation_file = "tests/mockserver_expectations/mock_hostgroup_response_1.json" tooling_for_test.create_mockserver_expectation( - CLUSTER, TENANT, URL_PATH, "GET", parameters, mockserver_expectation_file) + CLUSTER, TENANT, URL_PATH, "GET", parameters, response_payload_file=mockserver_expectation_file) command_tested = host_groups.get_host_groups_tenantwide(CLUSTER, TENANT) expected_result = { From 6b1ebc72b4039fe5c067849576e3ef70f98cc0ae Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 3 Aug 2020 00:48:18 -0500 Subject: [PATCH 26/79] PAF-21 #Changing unittest to discover in CircleCI --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 11080ed..20c1262 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -20,7 +20,7 @@ jobs: pipenv install --dev - run: command: | - pipenv run python -m unittest tests/test_host_groups.py + pipenv run python -m unittest discover - store_test_results: path: test-results - store_artifacts: From d9b1a3cedb04d92733c2a15318b24e5470daec73 Mon Sep 17 00:00:00 2001 From: Aaron Date: 
Mon, 3 Aug 2020 00:49:56 -0500 Subject: [PATCH 27/79] Think init file is needed for circleci discover --- tests/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/__init__.py diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 From b36d769c1a915d9a5d16195a5cc797e88eb8ea34 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 3 Aug 2020 01:17:15 -0500 Subject: [PATCH 28/79] PAF-21 First Maintenance Window Test --- ...intenance_create_daily_expectation_1.json} | 0 ..._maintenance_create_daily_response_1.json} | 2 +- tests/test_host_groups.py | 2 +- tests/test_maintenance_windows.py | 49 +++++++++++++++++++ tests/tooling_for_test.py | 27 +++++++--- 5 files changed, 70 insertions(+), 10 deletions(-) rename tests/mockserver_expectations/{mock_maintenance_create_once_expectation_1.json => mock_maintenance_create_daily_expectation_1.json} (100%) rename tests/mockserver_expectations/{mock_maintenance_create_once_response_1.json => mock_maintenance_create_daily_response_1.json} (75%) create mode 100644 tests/test_maintenance_windows.py diff --git a/tests/mockserver_expectations/mock_maintenance_create_once_expectation_1.json b/tests/mockserver_expectations/mock_maintenance_create_daily_expectation_1.json similarity index 100% rename from tests/mockserver_expectations/mock_maintenance_create_once_expectation_1.json rename to tests/mockserver_expectations/mock_maintenance_create_daily_expectation_1.json diff --git a/tests/mockserver_expectations/mock_maintenance_create_once_response_1.json b/tests/mockserver_expectations/mock_maintenance_create_daily_response_1.json similarity index 75% rename from tests/mockserver_expectations/mock_maintenance_create_once_response_1.json rename to tests/mockserver_expectations/mock_maintenance_create_daily_response_1.json index de86d44..36d21d7 100644 --- a/tests/mockserver_expectations/mock_maintenance_create_once_response_1.json +++ b/tests/mockserver_expectations/mock_maintenance_create_daily_response_1.json @@ -1,5 +1,5 @@ { "id": "1a000000-200a-3000-4000-5abc00000000", - "name": "Test Payload", + "name": "Test Payload Daily", "description": "Generating Payload for Test" } \ No newline at end of file diff --git a/tests/test_host_groups.py b/tests/test_host_groups.py index bfcdb5b..2a3d370 100644 --- a/tests/test_host_groups.py +++ b/tests/test_host_groups.py @@ -17,7 +17,7 @@ def test_get_host_groups_tenantwide(self): } mockserver_expectation_file = "tests/mockserver_expectations/mock_hostgroup_response_1.json" tooling_for_test.create_mockserver_expectation( - CLUSTER, TENANT, URL_PATH, "GET", parameters, response_payload_file=mockserver_expectation_file) + CLUSTER, TENANT, URL_PATH, "GET", parameters=parameters, response_payload_file=mockserver_expectation_file) command_tested = host_groups.get_host_groups_tenantwide(CLUSTER, TENANT) expected_result = { diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py new file mode 100644 index 0000000..b3f796d --- /dev/null +++ b/tests/test_maintenance_windows.py @@ -0,0 +1,49 @@ +""" +Test Cases For Maintenance Windows. 
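+
+Each test registers a mockserver expectation (the expected request payload plus a
+canned response) through tooling_for_test.create_mockserver_expectation, then calls
+the maintenance module and asserts that the returned value matches the canned
+response payload.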
+""" +import unittest +import user_variables +from tests import tooling_for_test +from dynatrace.tenant import maintenance +from dynatrace.requests.request_handler import TenantAPIs + +CLUSTER = user_variables.FULL_SET["mockserver1"] +TENANT = "tenant1" +URL_PATH = TenantAPIs.MAINTENANCE_WINDOWS + + +class TestMaintenanceWindowCreate(unittest.TestCase): + def test_create_daily_no_scope(self): + """ + Testing create daily Maintenance Window with no scope + """ + mockserver_expectation_file = "tests/mockserver_expectations/mock_maintenance_create_daily_expectation_1.json" + mockserver_response_file = "tests/mockserver_expectations/mock_maintenance_create_daily_response_1.json" + tooling_for_test.create_mockserver_expectation( + CLUSTER, + TENANT, + URL_PATH, + "POST", + request_payload_file=mockserver_expectation_file, + response_payload_file=mockserver_response_file, + ) + maintenance_schedule = maintenance.generate_schedule( + "DAILY", + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00" + ) + maintenance_json = maintenance.generate_window_json( + "Test Payload Daily", + "Generating Payload for Test", + "DETECT_PROBLEMS_AND_ALERT", + maintenance_schedule, + is_planned=True + ) + result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) + self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/tooling_for_test.py b/tests/tooling_for_test.py index 0575acf..442c2ac 100644 --- a/tests/tooling_for_test.py +++ b/tests/tooling_for_test.py @@ -4,7 +4,7 @@ from dynatrace.requests.request_handler import generate_tenant_url -def create_mockserver_expectation(cluster, tenant, url_path, request_type, parameters, response_payload_file=None, mock_id=None): +def create_mockserver_expectation(cluster, tenant, url_path, request_type, **kwargs): requests.packages.urllib3.disable_warnings() expectation = { "httpRequest": { @@ -23,21 +23,32 @@ def create_mockserver_expectation(cluster, tenant, url_path, request_type, param } # Paramaters should always at least have Api-Token - expectation["httpRequest"]["queryStringParameters"] = parameters + if 'parameters' in kwargs: + expectation["httpRequest"]["queryStringParameters"] = kwargs['parameters'] - if response_payload_file: - with open(response_payload_file) as f: + if "request_payload_file" in kwargs: + with open(kwargs['request_payload_file']) as f: + request_payload = json.load(f) + expectation["httpRequest"]["body"] = { + "type": "JSON", + "json": request_payload, + } + + if "response_payload_file" in kwargs: + with open(kwargs['response_payload_file']) as f: response_payload = json.load(f) expectation["httpResponse"]["body"] = { "type": "JSON", "json": response_payload, } + expectation["httpResponse"]["headers"] = { + "content-type": ["application/json"] + } - if mock_id: - expectation["id"] = mock_id + if "mock_id" in kwargs: + expectation["id"] = kwargs["mock_id"] - expectation_url = generate_tenant_url( - cluster, tenant) + "/mockserver/expectation" + expectation_url = f"{generate_tenant_url(cluster, tenant)}/mockserver/expectation" test_req = requests.request( "PUT", expectation_url, From effed7d8c2cbef7f054e16e6f7f3721a7f210408 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 3 Aug 2020 01:19:51 -0500 Subject: [PATCH 29/79] PAF-21 #MW Create now returns JSON of new window --- dynatrace/tenant/maintenance.py | 40 ++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/dynatrace/tenant/maintenance.py 
b/dynatrace/tenant/maintenance.py index dd73764..7441c41 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -4,10 +4,48 @@ import dynatrace.requests.request_handler as rh import user_variables as uv from dynatrace.exceptions import InvalidDateFormatException +from enum import Enum, auto MZ_ENDPOINT = rh.TenantAPIs.MAINTENANCE_WINDOWS +class Suppression(Enum): + """ + *** NOT ACTIVE YET*** + Types of suppression for create Maintenance Window JSON. Suppression is required + + Args: + Enum (DETECT_PROBLEMS_AND_ALERT): Full Alerting. Entites in scope will have notes that a Maintenance Window was active + Enum (DETECT_PROBLEMS_DONT_ALERT): Problems detected but alerting profiles in that scope are not triggered + Enum (DONT_DETECT_PROBLEMS): Problem detection completely off for the scope + """ + DETECT_PROBLEMS_AND_ALERT = auto() + DETECT_PROBLEMS_DONT_ALERT = auto() + DONT_DETECT_PROBLEMS = auto() + +class Day(Enum): + """ + *** NOT ACTIVE YET *** + Day of the Week + + Args: + Enum (MONDAY): MONDAY + Enum (TUESDAY): TUESDAY + Enum (WEDNESDAY): WEDNESDAY + Enum (THURSDAY): THURSDAY + Enum (FRIDAY): FRIDAY + Enum (SATURDAY): SATURDAY + Enum (SUNDAY): SUNDAY + """ + + MONDAY = auto() + TUESDAY = auto() + WEDNESDAY = auto() + THURSDAY = auto() + FRIDAY = auto() + SATURDAY = auto() + SUNDAY = auto() + def validate_datetime(datetime_text, required_format): try: datetime.datetime.strptime(datetime_text, required_format) @@ -118,7 +156,7 @@ def create_window(cluster, tenant, json): method=rh.HTTP.POST, endpoint=MZ_ENDPOINT, json=json) - return response.status_code + return response.json() def update_window(cluster, tenant, window_id, json): From 76d6ff6d207a8fac825b81754d10cc1350b8f2e8 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 3 Aug 2020 01:25:56 -0500 Subject: [PATCH 30/79] PAF-21 #More Sensible Enums --- dynatrace/tenant/maintenance.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/dynatrace/tenant/maintenance.py b/dynatrace/tenant/maintenance.py index 7441c41..5356161 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -15,13 +15,13 @@ class Suppression(Enum): Types of suppression for create Maintenance Window JSON. Suppression is required Args: - Enum (DETECT_PROBLEMS_AND_ALERT): Full Alerting. Entites in scope will have notes that a Maintenance Window was active - Enum (DETECT_PROBLEMS_DONT_ALERT): Problems detected but alerting profiles in that scope are not triggered - Enum (DONT_DETECT_PROBLEMS): Problem detection completely off for the scope + Enum (FULL_ALERTING): Full Alerting. 
Entites in scope will have notes that a Maintenance Window was active + Enum (DISABLE_ALERTING): Problems detected but alerting profiles in that scope are not triggered + Enum (DISABLE_DETECTION): Problem detection completely off for the scope """ - DETECT_PROBLEMS_AND_ALERT = auto() - DETECT_PROBLEMS_DONT_ALERT = auto() - DONT_DETECT_PROBLEMS = auto() + FULL_ALERTING = "DETECT_PROBLEMS_AND_ALERT" + DISABLE_ALERTING = "DETECT_PROBLEMS_DONT_ALERT" + DISABLE_DETECTION = "DONT_DETECT_PROBLEMS" class Day(Enum): """ @@ -38,13 +38,13 @@ class Day(Enum): Enum (SUNDAY): SUNDAY """ - MONDAY = auto() - TUESDAY = auto() - WEDNESDAY = auto() - THURSDAY = auto() - FRIDAY = auto() - SATURDAY = auto() - SUNDAY = auto() + MONDAY = "MONDAY" + TUESDAY = "TUESDAY" + WEDNESDAY = "WEDNESDAY" + THURSDAY = "THURSDAY" + FRIDAY = "FRIDAY" + SATURDAY = "SATURDAY" + SUNDAY = "SUNDAY" def validate_datetime(datetime_text, required_format): try: From 0c9a2ea47b4691af5dbe69255474c008b47e612f Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 3 Aug 2020 08:54:26 -0500 Subject: [PATCH 31/79] PAF-21 #Refactored test JSON dir. Added Test Case --- .../maintenance/mock_create_daily_1.json} | 0 .../mock_create_daily_single_tag_1.json | 25 ++++++++++ .../host_groups/mock_get_general_1.json} | 0 .../maintenance/mock_create_daily_1.json} | 0 .../mock_create_daily_single_tag_1.json | 5 ++ tests/test_host_groups.py | 5 +- tests/test_maintenance_windows.py | 49 +++++++++++++++++-- 7 files changed, 78 insertions(+), 6 deletions(-) rename tests/{mockserver_expectations/mock_maintenance_create_daily_expectation_1.json => mockserver_payloads/requests/maintenance/mock_create_daily_1.json} (100%) create mode 100644 tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json rename tests/{mockserver_expectations/mock_hostgroup_response_1.json => mockserver_payloads/responses/host_groups/mock_get_general_1.json} (100%) rename tests/{mockserver_expectations/mock_maintenance_create_daily_response_1.json => mockserver_payloads/responses/maintenance/mock_create_daily_1.json} (100%) create mode 100644 tests/mockserver_payloads/responses/maintenance/mock_create_daily_single_tag_1.json diff --git a/tests/mockserver_expectations/mock_maintenance_create_daily_expectation_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_1.json similarity index 100% rename from tests/mockserver_expectations/mock_maintenance_create_daily_expectation_1.json rename to tests/mockserver_payloads/requests/maintenance/mock_create_daily_1.json diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json new file mode 100644 index 0000000..a309fb1 --- /dev/null +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json @@ -0,0 +1,25 @@ +{ + "name": "Test Payload Daily with Tag", + "description": "Generating Payload for Test", + "suppression": "DETECT_PROBLEMS_AND_ALERT", + "schedule": { + "recurrenceType": "DAILY", + "start": "2020-01-01 00:00", + "end": "2020-01-02 00:00", + "zoneId": "America/Chicago", + "recurrence": { + "startTime": "23:00", + "durationMinutes": 60 + } + }, + "type": "PLANNED", + "scope": { + "entities": [], + "matches": [{ + "tags": [{ + "context": "CONTEXTLESS", + "key": "testing" + }] + }] + } +} \ No newline at end of file diff --git a/tests/mockserver_expectations/mock_hostgroup_response_1.json 
b/tests/mockserver_payloads/responses/host_groups/mock_get_general_1.json similarity index 100% rename from tests/mockserver_expectations/mock_hostgroup_response_1.json rename to tests/mockserver_payloads/responses/host_groups/mock_get_general_1.json diff --git a/tests/mockserver_expectations/mock_maintenance_create_daily_response_1.json b/tests/mockserver_payloads/responses/maintenance/mock_create_daily_1.json similarity index 100% rename from tests/mockserver_expectations/mock_maintenance_create_daily_response_1.json rename to tests/mockserver_payloads/responses/maintenance/mock_create_daily_1.json diff --git a/tests/mockserver_payloads/responses/maintenance/mock_create_daily_single_tag_1.json b/tests/mockserver_payloads/responses/maintenance/mock_create_daily_single_tag_1.json new file mode 100644 index 0000000..16ba0d1 --- /dev/null +++ b/tests/mockserver_payloads/responses/maintenance/mock_create_daily_single_tag_1.json @@ -0,0 +1,5 @@ +{ + "id": "f8d5614d-7407-4fdf-a6a1-1e0ed693a6cf", + "name": "Test Payload Daily with Tag", + "description": "Generating Payload for Test" +} \ No newline at end of file diff --git a/tests/test_host_groups.py b/tests/test_host_groups.py index 2a3d370..69581a3 100644 --- a/tests/test_host_groups.py +++ b/tests/test_host_groups.py @@ -9,15 +9,16 @@ URL_PATH = "/api/v1/entity/infrastructure/hosts" class TestHostGroupFunctions(unittest.TestCase): + RESPONSE_DIR = "tests/mockserver_payloads/responses/host_groups/" def test_get_host_groups_tenantwide(self): parameters = { "relativeTime": ["day"], "includeDetails": [ "true" ], "Api-Token": [CLUSTER["api_token"][TENANT]], } - mockserver_expectation_file = "tests/mockserver_expectations/mock_hostgroup_response_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_get_general_1.json" tooling_for_test.create_mockserver_expectation( - CLUSTER, TENANT, URL_PATH, "GET", parameters=parameters, response_payload_file=mockserver_expectation_file) + CLUSTER, TENANT, URL_PATH, "GET", parameters=parameters, response_file=mockserver_response_file) command_tested = host_groups.get_host_groups_tenantwide(CLUSTER, TENANT) expected_result = { diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index b3f796d..1142951 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -13,19 +13,28 @@ class TestMaintenanceWindowCreate(unittest.TestCase): + """ + Test Cases for Creating a Maintenance Window + + Args: + unittest ([type]): [description] + """ + REQUEST_DIR = "tests/mockserver_payloads/requests/maintenance/" + RESPONSE_DIR = "tests/mockserver_payloads/responses/maintenance/" + def test_create_daily_no_scope(self): """ Testing create daily Maintenance Window with no scope """ - mockserver_expectation_file = "tests/mockserver_expectations/mock_maintenance_create_daily_expectation_1.json" - mockserver_response_file = "tests/mockserver_expectations/mock_maintenance_create_daily_response_1.json" + mockserver_request_file = f"{self.REQUEST_DIR}mock_create_daily_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_daily_1.json" tooling_for_test.create_mockserver_expectation( CLUSTER, TENANT, URL_PATH, "POST", - request_payload_file=mockserver_expectation_file, - response_payload_file=mockserver_response_file, + request_file=mockserver_request_file, + response_file=mockserver_response_file, ) maintenance_schedule = maintenance.generate_schedule( "DAILY", @@ -44,6 +53,38 @@ def test_create_daily_no_scope(self): result = 
maintenance.create_window(CLUSTER, TENANT, maintenance_json) self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) + def test_create_daily_single_tag(self): + """ + Testing create daily Maintenance Window with a single tag scope + """ + mockserver_request_file = f"{self.REQUEST_DIR}mock_create_daily_single_tag_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_daily_single_tag_1.json" + tooling_for_test.create_mockserver_expectation( + CLUSTER, + TENANT, + URL_PATH, + "POST", + request_file=mockserver_request_file, + response_file=mockserver_response_file, + ) + maintenance_schedule = maintenance.generate_schedule( + "DAILY", + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00" + ) + maintenance_scope = maintenance.generate_scope(tags=[{'context': "CONTEXTLESS",'key': "testing"}]) + maintenance_json = maintenance.generate_window_json( + "Test Payload Daily", + "Generating Payload for Test", + "DETECT_PROBLEMS_AND_ALERT", + maintenance_schedule, + scope= maintenance_scope, + is_planned=True + ) + result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) + self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) if __name__ == '__main__': unittest.main() From e6179d5c4b236dbccb1ec4f00b2c8125962b931e Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 3 Aug 2020 10:27:12 -0500 Subject: [PATCH 32/79] PAF-21 #Fixing Testing tool to match refactor --- tests/tooling_for_test.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/tooling_for_test.py b/tests/tooling_for_test.py index 442c2ac..cacfb36 100644 --- a/tests/tooling_for_test.py +++ b/tests/tooling_for_test.py @@ -1,8 +1,10 @@ """Mockserver Expectation Setup""" import requests import json +import logging from dynatrace.requests.request_handler import generate_tenant_url +logging.basicConfig(filename="testing_tools.log",level=logging.DEBUG) def create_mockserver_expectation(cluster, tenant, url_path, request_type, **kwargs): requests.packages.urllib3.disable_warnings() @@ -22,20 +24,21 @@ def create_mockserver_expectation(cluster, tenant, url_path, request_type, **kwa "id": "OneOff", } + logging.debug(f"KWARGS {kwargs}") # Paramaters should always at least have Api-Token if 'parameters' in kwargs: expectation["httpRequest"]["queryStringParameters"] = kwargs['parameters'] - if "request_payload_file" in kwargs: - with open(kwargs['request_payload_file']) as f: + if "request_file" in kwargs: + with open(kwargs['request_file']) as f: request_payload = json.load(f) expectation["httpRequest"]["body"] = { "type": "JSON", "json": request_payload, } - if "response_payload_file" in kwargs: - with open(kwargs['response_payload_file']) as f: + if "response_file" in kwargs: + with open(kwargs['response_file']) as f: response_payload = json.load(f) expectation["httpResponse"]["body"] = { "type": "JSON", @@ -48,6 +51,9 @@ def create_mockserver_expectation(cluster, tenant, url_path, request_type, **kwa if "mock_id" in kwargs: expectation["id"] = kwargs["mock_id"] + + logging.debug(expectation) + expectation_url = f"{generate_tenant_url(cluster, tenant)}/mockserver/expectation" test_req = requests.request( "PUT", @@ -55,6 +61,7 @@ def create_mockserver_expectation(cluster, tenant, url_path, request_type, **kwa json=expectation, verify=False ) + logging.debug(test_req.text) if test_req.status_code > 300: print(expectation, test_req.status_code, test_req.text, end="\n") raise ValueError(test_req.status_code) From 
100e9e2f292d9aa8d48b0733152bb1d5352a1975 Mon Sep 17 00:00:00 2001 From: Aaron Date: Tue, 4 Aug 2020 22:16:54 -0500 Subject: [PATCH 33/79] PAF-21 fixed payload for 2nd Maintenance --- tests/test_maintenance_windows.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index 1142951..658656a 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -76,7 +76,7 @@ def test_create_daily_single_tag(self): ) maintenance_scope = maintenance.generate_scope(tags=[{'context': "CONTEXTLESS",'key': "testing"}]) maintenance_json = maintenance.generate_window_json( - "Test Payload Daily", + "Test Payload Daily with Tag", "Generating Payload for Test", "DETECT_PROBLEMS_AND_ALERT", maintenance_schedule, From 392fed215543b2baf75cb8f1854cd971032948dd Mon Sep 17 00:00:00 2001 From: Philipose Date: Wed, 5 Aug 2020 08:35:33 -0500 Subject: [PATCH 34/79] Create main.yml --- .github/workflows/main.yml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 .github/workflows/main.yml diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..4ef0fbb --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,27 @@ +# This is a basic workflow to help you get started with Actions + +name: CI + +# Controls when the action will run. Triggers the workflow on push or pull request +# events but only for the master branch +on: + push: + branches: [ master, test, dev ] + pull_request: + branches: [ master, test, dev ] + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: ubuntu-latest + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v2 + + # Runs a single command using the runners shell + - name: Super-Linter + uses: github/super-linter@v3.5.1 From 11aa84c43002a72f3f7c5cdf4fcaf99040786523 Mon Sep 17 00:00:00 2001 From: Philipose Date: Wed, 5 Aug 2020 08:39:32 -0500 Subject: [PATCH 35/79] Update and rename main.yml to linter.yml --- .github/workflows/linter.yml | 51 ++++++++++++++++++++++++++++++++++++ .github/workflows/main.yml | 27 ------------------- 2 files changed, 51 insertions(+), 27 deletions(-) create mode 100644 .github/workflows/linter.yml delete mode 100644 .github/workflows/main.yml diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml new file mode 100644 index 0000000..fcdc635 --- /dev/null +++ b/.github/workflows/linter.yml @@ -0,0 +1,51 @@ +########################### +########################### +## Linter GitHub Actions ## +########################### +########################### +name: Lint Code Base + +# +# Documentation: +# https://help.github.com/en/articles/workflow-syntax-for-github-actions +# + +############################# +# Start the job on all push # +############################# +on: + push: + branches-ignore: [master, test, dev] + # Remove the line above to run when pushing to master + pull_request: + branches: [master, test, dev] + +############### +# Set the Job # +############### +jobs: + build: + # Name the Job + name: Lint Code Base + # Set the agent to run on + runs-on: ubuntu-latest + + ################## + # Load all steps # + ################## + steps: + 
########################## + # Checkout the code base # + ########################## + - name: Checkout Code + uses: actions/checkout@v2 + + ################################ + # Run Linter against code base # + ################################ + - name: Lint Code Base + uses: docker://github/super-linter:v3 + env: + VALIDATE_ALL_CODEBASE: false + DEFAULT_BRANCH: master + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index 4ef0fbb..0000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,27 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: CI - -# Controls when the action will run. Triggers the workflow on push or pull request -# events but only for the master branch -on: - push: - branches: [ master, test, dev ] - pull_request: - branches: [ master, test, dev ] - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-latest - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 - - # Runs a single command using the runners shell - - name: Super-Linter - uses: github/super-linter@v3.5.1 From 50fb88cf425192952a501e2f9827c77240e666ab Mon Sep 17 00:00:00 2001 From: Philipose Date: Wed, 5 Aug 2020 08:48:32 -0500 Subject: [PATCH 36/79] Update linter.yml Linting against whole code base. Project should be small enough to be fine --- .github/workflows/linter.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index fcdc635..8ee3748 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -15,7 +15,7 @@ name: Lint Code Base ############################# on: push: - branches-ignore: [master, test, dev] + branches-ignore: [master] # Remove the line above to run when pushing to master pull_request: branches: [master, test, dev] @@ -46,6 +46,6 @@ jobs: - name: Lint Code Base uses: docker://github/super-linter:v3 env: - VALIDATE_ALL_CODEBASE: false + VALIDATE_ALL_CODEBASE: true DEFAULT_BRANCH: master GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From b2c25a69deb3af6bd444e7bce4604e8719ab1407 Mon Sep 17 00:00:00 2001 From: Philipose Date: Wed, 5 Aug 2020 08:54:42 -0500 Subject: [PATCH 37/79] [no-ci] swiching to new validation --- .github/workflows/linter.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 8ee3748..d093e8a 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -46,6 +46,6 @@ jobs: - name: Lint Code Base uses: docker://github/super-linter:v3 env: - VALIDATE_ALL_CODEBASE: true + VALIDATE_ALL_CODEBASE: false DEFAULT_BRANCH: master GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From adb2dc9651c158211d872fd43f4667e8b6fffcc4 Mon Sep 17 00:00:00 2001 From: Radu Date: Wed, 5 Aug 2020 18:10:18 +0100 Subject: [PATCH 38/79] Tests for Topology>Hosts functions --- .../requests/hosts/tags.json | 6 + .../responses/get_all.json | 0 .../responses/hosts/get_all.json | 1381 +++++++++++++++++ .../responses/hosts/get_single.json | 70 + tests/test_host_groups.py | 36 +- tests/test_topology_hosts.py | 138 ++ tests/tooling_for_test.py | 113 +- 7 files changed, 1674 
insertions(+), 70 deletions(-) create mode 100644 tests/mockserver_payloads/requests/hosts/tags.json create mode 100644 tests/mockserver_payloads/responses/get_all.json create mode 100644 tests/mockserver_payloads/responses/hosts/get_all.json create mode 100644 tests/mockserver_payloads/responses/hosts/get_single.json create mode 100644 tests/test_topology_hosts.py diff --git a/tests/mockserver_payloads/requests/hosts/tags.json b/tests/mockserver_payloads/requests/hosts/tags.json new file mode 100644 index 0000000..5694743 --- /dev/null +++ b/tests/mockserver_payloads/requests/hosts/tags.json @@ -0,0 +1,6 @@ +{ + "tags": [ + "demo", + "example" + ] +} \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/get_all.json b/tests/mockserver_payloads/responses/get_all.json new file mode 100644 index 0000000..e69de29 diff --git a/tests/mockserver_payloads/responses/hosts/get_all.json b/tests/mockserver_payloads/responses/hosts/get_all.json new file mode 100644 index 0000000..ef930c6 --- /dev/null +++ b/tests/mockserver_payloads/responses/hosts/get_all.json @@ -0,0 +1,1381 @@ +[ + { + "entityId": "HOST-9F74450267BAAE20", + "displayName": "host1123.radu.local", + "discoveredName": "host1123.radu.local", + "firstSeenTimestamp": 1594365724435, + "lastSeenTimestamp": 1596559448485, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "Application", + "value": "Super_App" + }, + { + "context": "CONTEXTLESS", + "key": "Environment", + "value": "DEV" + } + ], + "fromRelationships": {}, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-7BDD5DC06168C858", + "PROCESS_GROUP_INSTANCE-726C522DDE0D2524", + "PROCESS_GROUP_INSTANCE-1238166A9681701C", + "PROCESS_GROUP_INSTANCE-829A8915392BFF28", + "PROCESS_GROUP_INSTANCE-6277FD97691C3FCB", + "PROCESS_GROUP_INSTANCE-0A37526FC2730958", + "PROCESS_GROUP_INSTANCE-F86303D27A8E830D", + "PROCESS_GROUP_INSTANCE-7C78AA98F4803D16", + "PROCESS_GROUP_INSTANCE-F9B6343E59930663" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61" + ], + "runsOn": [ + "PROCESS_GROUP-4DE8442CCEAD2251", + "PROCESS_GROUP-6A2955530FA29616", + "PROCESS_GROUP-893E1F101431ADD1", + "PROCESS_GROUP-ACF6C23C63075E6F", + "PROCESS_GROUP-A93DF7C68AF50E3B", + "PROCESS_GROUP-719A10C7F5761D1B", + "PROCESS_GROUP-296B3416E647AAEA", + "PROCESS_GROUP-2F0439DE58E8F50B", + "PROCESS_GROUP-087AEE9E5099E4D2" + ] + }, + "osType": "WINDOWS", + "osArchitecture": "X86", + "osVersion": "Windows Server 2012 Standard, ver. 
6.2.9200", + "hypervisorType": "VMWARE", + "ipAddresses": [ + "10.188.203.46" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 1, + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "consumedHostUnits": 0.25, + "managementZones": [ + { + "id": "4173205628047632907", + "name": "MANAGE_ZONE_DEV" + } + ], + "hostGroup": { + "meId": "HOST_GROUP-FA0C3B0A6B7F4D78", + "name": "DEV_SUPERAPP_ZONE-A" + } + }, + { + "entityId": "HOST-5B9CE4E4E14185FA", + "displayName": "eda.aphrodite.dev.eods.uk.eeca", + "discoveredName": "eda.aphrodite.dev.eods.uk.eeca", + "firstSeenTimestamp": 1595930453531, + "lastSeenTimestamp": 1596567537628, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-B667FC231C8B8DD3" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-A21F23DF2377CDF3", + "PROCESS_GROUP_INSTANCE-DDF57230A17C229B", + "PROCESS_GROUP_INSTANCE-8EC7F099B79DE446", + "PROCESS_GROUP_INSTANCE-45B761529113B950", + "PROCESS_GROUP_INSTANCE-DC819D1A1D4BD277", + "PROCESS_GROUP_INSTANCE-6CD25C7F9F3EA7B4", + "PROCESS_GROUP_INSTANCE-94CCA21312F222E9", + "PROCESS_GROUP_INSTANCE-370D49A5BE8CD98F", + "PROCESS_GROUP_INSTANCE-8EA3C7CC429BC058", + "PROCESS_GROUP_INSTANCE-3E9F8129DED1826A", + "PROCESS_GROUP_INSTANCE-48B3556CC2F4EC85", + "PROCESS_GROUP_INSTANCE-387D1C89E2A32B65", + "PROCESS_GROUP_INSTANCE-853875A0D767AC6A", + "PROCESS_GROUP_INSTANCE-B29164D385A8123C" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-B667FC231C8B8DD3" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-77FDC8C1A93F7189", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.161" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.25, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-aphrodite-eda", + "eods-dev-shared-eda" + ], + "awsInstanceType": "t3.medium", + "amiId": "ami-0b196892c728a382c", + "awsInstanceId": "i-0f1a3ac209daf5032", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "DEV_AWS_SHARED" + } + }, + { + "entityId": "HOST-421D60DB4A2EA929", + "displayName": "eda.ascend.dev.eods.uk.eeca", + "discoveredName": "eda.ascend.dev.eods.uk.eeca", + "firstSeenTimestamp": 1596185232748, + "lastSeenTimestamp": 1596567554260, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-4399C32AF24910D5", + "HOST-E6234CDBD5DD63DF" + ] + }, + "toRelationships": { + 
"isProcessOf": [ + "PROCESS_GROUP_INSTANCE-C474F60F0A130E48", + "PROCESS_GROUP_INSTANCE-0CF25E0DFD16B8EA", + "PROCESS_GROUP_INSTANCE-7553D84034518B67", + "PROCESS_GROUP_INSTANCE-1C3F2EA649460DBB", + "PROCESS_GROUP_INSTANCE-8D4D262CB99D0E3A", + "PROCESS_GROUP_INSTANCE-974674A61CF2C895", + "PROCESS_GROUP_INSTANCE-2E8CCD9A15E3F55C", + "PROCESS_GROUP_INSTANCE-AEB6F70607F20436", + "PROCESS_GROUP_INSTANCE-5132D153699BC056", + "PROCESS_GROUP_INSTANCE-1D3151BD9706D607", + "PROCESS_GROUP_INSTANCE-D0BC8B6AC86E43AB", + "PROCESS_GROUP_INSTANCE-9CB9F19F7C0880B9", + "PROCESS_GROUP_INSTANCE-3016E7EE560C3DEF", + "PROCESS_GROUP_INSTANCE-2C4D0EFE814130C6" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-4399C32AF24910D5", + "HOST-E6234CDBD5DD63DF" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-FAA604E684B7708E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.176" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.25, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-shared-eda", + "eods-dev-ascend-eda" + ], + "awsInstanceType": "t3.medium", + "amiId": "ami-0a41565f4b7a71076", + "awsInstanceId": "i-0227f5311abf531ce", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-413F4F394098A24D", + "displayName": "eda.brazilgds.dev.eods.uk.eeca", + "discoveredName": "eda.brazilgds.dev.eods.uk.eeca", + "firstSeenTimestamp": 1596190372236, + "lastSeenTimestamp": 1596567533507, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-DEDC11439C4F6377" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-03A1FD721E7792EA", + "PROCESS_GROUP_INSTANCE-2DAEE2781F55FE38", + "PROCESS_GROUP_INSTANCE-47B3054A7FF34EAD", + "PROCESS_GROUP_INSTANCE-0665A89C4C1C88A3", + "PROCESS_GROUP_INSTANCE-8E6F09CEB32B055E", + "PROCESS_GROUP_INSTANCE-91B8F7693E08AEF7", + "PROCESS_GROUP_INSTANCE-C22D72ACEE844255", + "PROCESS_GROUP_INSTANCE-C756D9ED00A5052C", + "PROCESS_GROUP_INSTANCE-94645B441644C3F1", + "PROCESS_GROUP_INSTANCE-7671F7A23EE78003", + "PROCESS_GROUP_INSTANCE-9F9BDE7D76BE8BDD", + "PROCESS_GROUP_INSTANCE-9237C3DDF06510D7", + "PROCESS_GROUP_INSTANCE-1B20974DE5BF0869", + "PROCESS_GROUP_INSTANCE-AB2626A4FA4B4C76", + "PROCESS_GROUP_INSTANCE-5210FEB1632DCB32" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + 
"AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-DEDC11439C4F6377" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-77FDC8C1A93F7189", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-FAA604E684B7708E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.190" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.25, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-brazilgds-eda", + "eods-dev-shared-eda" + ], + "awsInstanceType": "t3.medium", + "amiId": "ami-0a41565f4b7a71076", + "awsInstanceId": "i-04050a5a2bfc20c77", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-E6234CDBD5DD63DF", + "displayName": "eda.exegol.dev.eods.uk.eeca", + "discoveredName": "eda.exegol.dev.eods.uk.eeca", + "firstSeenTimestamp": 1596524643794, + "lastSeenTimestamp": 1596567543866, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-421D60DB4A2EA929" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-0B5B8CE18D930808", + "PROCESS_GROUP_INSTANCE-29730A2C266EC4CC", + "PROCESS_GROUP_INSTANCE-56F828839C899057", + "PROCESS_GROUP_INSTANCE-9AD062F556E05603", + "PROCESS_GROUP_INSTANCE-EFC07978167CBF23", + "PROCESS_GROUP_INSTANCE-8AB2E19A8A103FAA", + "PROCESS_GROUP_INSTANCE-3887DD9FE3FB4A4F", + "PROCESS_GROUP_INSTANCE-F50CFD53F6680AA0", + "PROCESS_GROUP_INSTANCE-604ADA0F95E0C4BE", + "PROCESS_GROUP_INSTANCE-9865AA2CFE0674F5", + "PROCESS_GROUP_INSTANCE-2C03688E54CBDC5B", + "PROCESS_GROUP_INSTANCE-337858A683010263", + "PROCESS_GROUP_INSTANCE-7635A326839A45D4", + "PROCESS_GROUP_INSTANCE-D16DF440ABA24191" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-421D60DB4A2EA929" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-FAA604E684B7708E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + 
"ipAddresses": [ + "10.226.144.177" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.5, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-shared-eda", + "eods-dev-exegol-eda" + ], + "awsInstanceType": "t3.large", + "amiId": "ami-00446e862ac45b149", + "awsInstanceId": "i-0c9b5315f41124dbd", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-DEDC11439C4F6377", + "displayName": "eda.exegol.dev.eods.uk.eeca", + "discoveredName": "eda.exegol.dev.eods.uk.eeca", + "firstSeenTimestamp": 1596242536965, + "lastSeenTimestamp": 1596523700852, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-413F4F394098A24D" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-0B87053ECA9302CB", + "PROCESS_GROUP_INSTANCE-E992A9D8E2304139", + "PROCESS_GROUP_INSTANCE-70714018883520DF", + "PROCESS_GROUP_INSTANCE-5133C5933F2890B3", + "PROCESS_GROUP_INSTANCE-ADCF32A8AA21D404", + "PROCESS_GROUP_INSTANCE-58B58797DC72C416", + "PROCESS_GROUP_INSTANCE-CDF3A0CBBFFA0A08", + "PROCESS_GROUP_INSTANCE-00788007AA694AE7", + "PROCESS_GROUP_INSTANCE-B24DBC02C3823F02", + "PROCESS_GROUP_INSTANCE-7D8F35669570D4E8", + "PROCESS_GROUP_INSTANCE-58930C0AA7201822", + "PROCESS_GROUP_INSTANCE-6D954DBD8B599781", + "PROCESS_GROUP_INSTANCE-B29453B6005AB7E5", + "PROCESS_GROUP_INSTANCE-30904F76A54966F0", + "PROCESS_GROUP_INSTANCE-439E2D9CE96C770C", + "PROCESS_GROUP_INSTANCE-118C57B46FFCC464" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-413F4F394098A24D" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-77FDC8C1A93F7189", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-B62F90EC2186CCF3", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-B1C069EABEFCD191", + "PROCESS_GROUP-FAA604E684B7708E" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.191" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "consumedHostUnits": 0.5, + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-shared-eda", + "eods-dev-exegol-eda" + ], + "awsInstanceType": "t3.large", + "amiId": "ami-03f1877fb9d2f6094", + "awsInstanceId": "i-0749617d1ef4342bc", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-B667FC231C8B8DD3", + "displayName": 
"eda.gaia.dev.eods.uk.eeca", + "discoveredName": "eda.gaia.dev.eods.uk.eeca", + "firstSeenTimestamp": 1595843874728, + "lastSeenTimestamp": 1596439377370, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-5B9CE4E4E14185FA" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-7364F89D49309AE0", + "PROCESS_GROUP_INSTANCE-68C36D672AADA443", + "PROCESS_GROUP_INSTANCE-D1E45549D386DC67", + "PROCESS_GROUP_INSTANCE-A5484DAB3F3EE4AC", + "PROCESS_GROUP_INSTANCE-40E7F7CF6A6919F1", + "PROCESS_GROUP_INSTANCE-DAF651624346D1A6", + "PROCESS_GROUP_INSTANCE-40E382F35D47986B", + "PROCESS_GROUP_INSTANCE-F13DF7666B32BF1A", + "PROCESS_GROUP_INSTANCE-37F8B8E865F00148", + "PROCESS_GROUP_INSTANCE-2D3E7F97AF294DBA", + "PROCESS_GROUP_INSTANCE-300E6AF75CB62AB2" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-5B9CE4E4E14185FA" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-77FDC8C1A93F7189", + "PROCESS_GROUP-B38E65573551425E", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.160" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "consumedHostUnits": 0.25, + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-gaia-eda", + "eods-dev-shared-eda" + ], + "awsInstanceType": "t3.medium", + "amiId": "ami-02760d44609a734b9", + "awsInstanceId": "i-09ce4aa19402e2fca", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-7A728806A05336AC", + "displayName": "eda.gaia.dev.eods.uk.eeca", + "discoveredName": "eda.gaia.dev.eods.uk.eeca", + "firstSeenTimestamp": 1596440075964, + "lastSeenTimestamp": 1596567334915, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-6D6EAA89DCBA960A" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-16E32547FF9E6AD9", + "PROCESS_GROUP_INSTANCE-695D398E83E65FD3", + "PROCESS_GROUP_INSTANCE-FC1B1ED2E06E91CD", + "PROCESS_GROUP_INSTANCE-666F8EDCBDFEA602", + "PROCESS_GROUP_INSTANCE-86A66804E2E55934", + "PROCESS_GROUP_INSTANCE-AF299C7BF68F5710", + "PROCESS_GROUP_INSTANCE-4D3C309DDE2C14E2", + "PROCESS_GROUP_INSTANCE-B522CEF153E091BF", + "PROCESS_GROUP_INSTANCE-76FF566A1900C5C4", + "PROCESS_GROUP_INSTANCE-0957891719F167E5", + "PROCESS_GROUP_INSTANCE-0DABC17A11907B18", + "PROCESS_GROUP_INSTANCE-4B13F843F126982C", + "PROCESS_GROUP_INSTANCE-A4D6194296751F3C" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + 
"isNetworkClientOfHost": [ + "HOST-6D6EAA89DCBA960A" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-B38E65573551425E", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.252" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.25, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-gaia-eda", + "eods-dev-shared-eda" + ], + "awsInstanceType": "t3.medium", + "amiId": "ami-03f1877fb9d2f6094", + "awsInstanceId": "i-0955c2c29e3157ccf", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-EDE2A188CB4C77B1", + "displayName": "eda.hestia.dev.eods.uk.eeca", + "discoveredName": "eda.hestia.dev.eods.uk.eeca", + "firstSeenTimestamp": 1595425521418, + "lastSeenTimestamp": 1596567386740, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-752E1FB58ABB01A6" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-334630CCFD6A5E21", + "PROCESS_GROUP_INSTANCE-250CE79C1D6F4E7B", + "PROCESS_GROUP_INSTANCE-81D8D4FC4769FFA7", + "PROCESS_GROUP_INSTANCE-6B8B375C8B71D0D0", + "PROCESS_GROUP_INSTANCE-873DAF8BE2257760", + "PROCESS_GROUP_INSTANCE-C0D9A551713FE025", + "PROCESS_GROUP_INSTANCE-96B5396E1B0D626F", + "PROCESS_GROUP_INSTANCE-FECD1000E8F91ECE", + "PROCESS_GROUP_INSTANCE-8429E0656F083B99", + "PROCESS_GROUP_INSTANCE-FD651A51F83C50B9", + "PROCESS_GROUP_INSTANCE-81730CC994812BC4", + "PROCESS_GROUP_INSTANCE-663A8FCC78ED3826" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1115D1BB3693C8B8", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-7184C606962EE406", + "PROCESS_GROUP-77FDC8C1A93F7189", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.140" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + 
"revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.25, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-hestia-eda", + "eods-dev-shared-eda" + ], + "awsInstanceType": "t3.medium", + "amiId": "ami-048e11ed1aa982d34", + "awsInstanceId": "i-0c527af83fb5e5964", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-350D8009776A920D", + "displayName": "eda.indiagds.dev.eods.uk.eeca", + "discoveredName": "eda.indiagds.dev.eods.uk.eeca", + "firstSeenTimestamp": 1593783034963, + "lastSeenTimestamp": 1596567529326, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": {}, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-9A5CE817E2B4D9FE", + "PROCESS_GROUP_INSTANCE-C2F55AA979C65E18", + "PROCESS_GROUP_INSTANCE-B36416DD3757356C", + "PROCESS_GROUP_INSTANCE-E056947421B6F3B1", + "PROCESS_GROUP_INSTANCE-4BC7AB263964692E", + "PROCESS_GROUP_INSTANCE-024338920915B043", + "PROCESS_GROUP_INSTANCE-EBA9114D414CBB9D", + "PROCESS_GROUP_INSTANCE-DAB4E357847D9514", + "PROCESS_GROUP_INSTANCE-243D27A71401EA5F", + "PROCESS_GROUP_INSTANCE-84DEAA5E46984F15", + "PROCESS_GROUP_INSTANCE-2622318154DFFB72", + "PROCESS_GROUP_INSTANCE-599C2D4828A7CE78", + "PROCESS_GROUP_INSTANCE-E042FBEFB962ED19", + "PROCESS_GROUP_INSTANCE-FA5DC6FE84D9351E", + "PROCESS_GROUP_INSTANCE-E3C89AF1198FA66D" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-77FDC8C1A93F7189", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-FAA604E684B7708E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.136" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.5, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-shared-eda", + "eods-dev-indiagds-eda" + ], + "awsInstanceType": "t3.large", + "amiId": "ami-0ce88eb2646dd6eac", + "awsInstanceId": "i-0069235aa24092781", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-6D6EAA89DCBA960A", + "displayName": "eda.jeds.dev.eods.uk.eeca", + "discoveredName": "eda.jeds.dev.eods.uk.eeca", + "firstSeenTimestamp": 1596098871283, + "lastSeenTimestamp": 1596567349283, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + 
"key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-7A728806A05336AC" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-B3CA3BCDEA9CBF9A", + "PROCESS_GROUP_INSTANCE-BB5992E1761F5789", + "PROCESS_GROUP_INSTANCE-3036CA29518BDD50", + "PROCESS_GROUP_INSTANCE-14F58D2B2E92D7C9", + "PROCESS_GROUP_INSTANCE-77F967D5FCA1F315", + "PROCESS_GROUP_INSTANCE-24E4D4127FFD8646", + "PROCESS_GROUP_INSTANCE-155BEFD4FB7C9C59", + "PROCESS_GROUP_INSTANCE-5A201212A2C5B444", + "PROCESS_GROUP_INSTANCE-A23EEC7E2F093119", + "PROCESS_GROUP_INSTANCE-B835BEF48A66F7B6", + "PROCESS_GROUP_INSTANCE-EB073C5D9C87316B", + "PROCESS_GROUP_INSTANCE-041C0B46FE2D4AAC", + "PROCESS_GROUP_INSTANCE-7E411B01FF0FFF75", + "PROCESS_GROUP_INSTANCE-01FF07C88377CA7F" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-7A728806A05336AC" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-77FDC8C1A93F7189", + "PROCESS_GROUP-B38E65573551425E", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.250" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.25, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-jeds-eda", + "eods-dev-shared-eda" + ], + "awsInstanceType": "t3.medium", + "amiId": "ami-02f52db0f39b16e7d", + "awsInstanceId": "i-0a534fbb876e94607", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-405742DD38D0564F", + "displayName": "eda.morpheus.dev.eods.uk.eeca", + "discoveredName": "eda.morpheus.dev.eods.uk.eeca", + "firstSeenTimestamp": 1596005374982, + "lastSeenTimestamp": 1596567516902, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-4399C32AF24910D5" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-9EF3D3990EF67FDF", + "PROCESS_GROUP_INSTANCE-1A51DEAECD6EBA9A", + "PROCESS_GROUP_INSTANCE-EB1BBCAD2F72C245", + "PROCESS_GROUP_INSTANCE-12FB702F43DAC1BF", + "PROCESS_GROUP_INSTANCE-7719FA4646AF7401", + "PROCESS_GROUP_INSTANCE-8F07042ACB63F15C", + "PROCESS_GROUP_INSTANCE-2140323046FA9179", + "PROCESS_GROUP_INSTANCE-950C56A06E0C37F3", + "PROCESS_GROUP_INSTANCE-8B29FEDB21622A61", + "PROCESS_GROUP_INSTANCE-C63ED40978EDF12E", + "PROCESS_GROUP_INSTANCE-3B3A6D4BBA718854", + "PROCESS_GROUP_INSTANCE-F60BA59802FE681B", + "PROCESS_GROUP_INSTANCE-5378F3551B653F30", + 
"PROCESS_GROUP_INSTANCE-2CC6EF9C671D0A3A" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-4399C32AF24910D5" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-B38E65573551425E", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-FAA604E684B7708E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.174" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.25, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-morpheus-eda", + "eods-dev-shared-eda" + ], + "awsInstanceType": "t3.medium", + "amiId": "ami-02f52db0f39b16e7d", + "awsInstanceId": "i-0122160aacbb13ba3", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-4399C32AF24910D5", + "displayName": "eda.sofiaqa.dev.eods.uk.eeca", + "discoveredName": "eda.sofiaqa.dev.eods.uk.eeca", + "firstSeenTimestamp": 1595425419743, + "lastSeenTimestamp": 1596567525176, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": { + "isNetworkClientOfHost": [ + "HOST-421D60DB4A2EA929", + "HOST-405742DD38D0564F" + ] + }, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-E71904BAC6A65EA5", + "PROCESS_GROUP_INSTANCE-2F086E6BAD844CA0", + "PROCESS_GROUP_INSTANCE-A99B934F3FA051F7", + "PROCESS_GROUP_INSTANCE-67B034846C4CEA75", + "PROCESS_GROUP_INSTANCE-C5F055FEB274B7B4", + "PROCESS_GROUP_INSTANCE-50B672A2D1FC79AA", + "PROCESS_GROUP_INSTANCE-F4292764D018FC1B", + "PROCESS_GROUP_INSTANCE-33600EA8F97CB445", + "PROCESS_GROUP_INSTANCE-9D3D526EC46F3945", + "PROCESS_GROUP_INSTANCE-7315D06DB535C9B3", + "PROCESS_GROUP_INSTANCE-3C170C0FB469B00B" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-421D60DB4A2EA929", + "HOST-405742DD38D0564F" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-77FDC8C1A93F7189", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-B1C069EABEFCD191" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.175" + ], + "bitness": 
"64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.25, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-shared-eda", + "eods-dev-sofiaqa-eda" + ], + "awsInstanceType": "t3.medium", + "amiId": "ami-048e11ed1aa982d34", + "awsInstanceId": "i-01158fb516cc1882b", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + }, + { + "entityId": "HOST-752E1FB58ABB01A6", + "displayName": "eda.testkibana.dev.eods.uk.eeca", + "discoveredName": "eda.testkibana.dev.eods.uk.eeca", + "firstSeenTimestamp": 1592896258226, + "lastSeenTimestamp": 1596567568688, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "OS", + "value": "Linux" + }, + { + "context": "CONTEXTLESS", + "key": "Alerting_Queue", + "value": "SUPPORT" + } + ], + "fromRelationships": {}, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-58151B6C30C89632", + "PROCESS_GROUP_INSTANCE-4260A72EF4C423E8", + "PROCESS_GROUP_INSTANCE-BA7E59427908A6B5", + "PROCESS_GROUP_INSTANCE-A0750BC8DC67601A", + "PROCESS_GROUP_INSTANCE-7C1E2A24C93009CA", + "PROCESS_GROUP_INSTANCE-F3478961CA86A6C7", + "PROCESS_GROUP_INSTANCE-94CEA4B98C81917F", + "PROCESS_GROUP_INSTANCE-C8450A437F254F0F", + "PROCESS_GROUP_INSTANCE-CC5BAC0D0013D2CA", + "PROCESS_GROUP_INSTANCE-19BFB2F4D5765DD3", + "PROCESS_GROUP_INSTANCE-5827CBC26581E935", + "PROCESS_GROUP_INSTANCE-AB8A8EF1BC9D2836", + "PROCESS_GROUP_INSTANCE-A105402BE0932764", + "PROCESS_GROUP_INSTANCE-9C58C0F66EFB3E6C", + "PROCESS_GROUP_INSTANCE-B677ABDA0185EBF3", + "PROCESS_GROUP_INSTANCE-D42BEE0DAAF2A5AE", + "PROCESS_GROUP_INSTANCE-32392E4FC97880EF", + "PROCESS_GROUP_INSTANCE-7EABFD171B7DB61D" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61", + "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" + ], + "isNetworkClientOfHost": [ + "HOST-EDE2A188CB4C77B1" + ], + "runsOn": [ + "PROCESS_GROUP-F612B535AB74A08A", + "PROCESS_GROUP-0765BFB1555AEC64", + "PROCESS_GROUP-5D3213D3CEE71BFC", + "PROCESS_GROUP-7BE59F151DF1E05F", + "PROCESS_GROUP-5C7D243AE1CD33CA", + "PROCESS_GROUP-79AF9C7B3182E51B", + "PROCESS_GROUP-1115D1BB3693C8B8", + "PROCESS_GROUP-1D024AE6C849FC39", + "PROCESS_GROUP-28699B8D57AACB48", + "PROCESS_GROUP-D55B147D56DC61BC", + "PROCESS_GROUP-8F167A901E185CA4", + "PROCESS_GROUP-57D66C1273F88C3D", + "PROCESS_GROUP-374EE57952100044", + "PROCESS_GROUP-82AC69032E524B5E", + "PROCESS_GROUP-A105F1B82049A408", + "PROCESS_GROUP-3C74CEE5747023B4", + "PROCESS_GROUP-E53DDEE9533F67FF" + ] + }, + "osType": "LINUX", + "osArchitecture": "X86", + "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", + "hypervisorType": "KVM", + "ipAddresses": [ + "10.226.144.147" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 2, + "cloudType": "EC2", + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "agentVersion": { + "major": 1, + "minor": 185, + "revision": 137, + "timestamp": "20200212-183600", + "sourceRevision": "" + }, + "consumedHostUnits": 0.5, + "userLevel": "SUPERUSER", + "managementZones": [ + { + "id": "5557749485955049446", + "name": "AWS_EODS_SHARED" + } + ], + "awsSecurityGroup": [ + "eods-dev-shared-eda", + "eods-dev-testkibana-eda" + ], + "awsInstanceType": "m5.large", + "amiId": 
"ami-0e1f7043cd799a9fc", + "awsInstanceId": "i-0588f65cba4550089", + "hostGroup": { + "meId": "HOST_GROUP-8692976EA85437E7", + "name": "AWS_EODS_SHARED" + } + } +] \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/hosts/get_single.json b/tests/mockserver_payloads/responses/hosts/get_single.json new file mode 100644 index 0000000..dcda31d --- /dev/null +++ b/tests/mockserver_payloads/responses/hosts/get_single.json @@ -0,0 +1,70 @@ +{ + "entityId": "HOST-9F74450267BAAE20", + "displayName": "host1123.radu.local", + "discoveredName": "host1123.radu.local", + "firstSeenTimestamp": 1594365724435, + "lastSeenTimestamp": 1596559448485, + "tags": [ + { + "context": "CONTEXTLESS", + "key": "Application", + "value": "Super_App" + }, + { + "context": "CONTEXTLESS", + "key": "Environment", + "value": "DEV" + } + ], + "fromRelationships": {}, + "toRelationships": { + "isProcessOf": [ + "PROCESS_GROUP_INSTANCE-7BDD5DC06168C858", + "PROCESS_GROUP_INSTANCE-726C522DDE0D2524", + "PROCESS_GROUP_INSTANCE-1238166A9681701C", + "PROCESS_GROUP_INSTANCE-829A8915392BFF28", + "PROCESS_GROUP_INSTANCE-6277FD97691C3FCB", + "PROCESS_GROUP_INSTANCE-0A37526FC2730958", + "PROCESS_GROUP_INSTANCE-F86303D27A8E830D", + "PROCESS_GROUP_INSTANCE-7C78AA98F4803D16", + "PROCESS_GROUP_INSTANCE-F9B6343E59930663" + ], + "isSiteOf": [ + "GEOLOC_SITE-7580E62C914CAE61" + ], + "runsOn": [ + "PROCESS_GROUP-4DE8442CCEAD2251", + "PROCESS_GROUP-6A2955530FA29616", + "PROCESS_GROUP-893E1F101431ADD1", + "PROCESS_GROUP-ACF6C23C63075E6F", + "PROCESS_GROUP-A93DF7C68AF50E3B", + "PROCESS_GROUP-719A10C7F5761D1B", + "PROCESS_GROUP-296B3416E647AAEA", + "PROCESS_GROUP-2F0439DE58E8F50B", + "PROCESS_GROUP-087AEE9E5099E4D2" + ] + }, + "osType": "WINDOWS", + "osArchitecture": "X86", + "osVersion": "Windows Server 2012 Standard, ver. 
6.2.9200", + "hypervisorType": "VMWARE", + "ipAddresses": [ + "10.188.203.46" + ], + "bitness": "64bit", + "cpuCores": 1, + "logicalCpuCores": 1, + "monitoringMode": "FULL_STACK", + "networkZoneId": "default", + "consumedHostUnits": 0.25, + "managementZones": [ + { + "id": "4173205628047632907", + "name": "MANAGE_ZONE_DEV" + } + ], + "hostGroup": { + "meId": "HOST_GROUP-FA0C3B0A6B7F4D78", + "name": "DEV_SUPERAPP_ZONE-A" + } +} \ No newline at end of file diff --git a/tests/test_host_groups.py b/tests/test_host_groups.py index 69581a3..48fda87 100644 --- a/tests/test_host_groups.py +++ b/tests/test_host_groups.py @@ -8,23 +8,27 @@ TENANT = "tenant1" URL_PATH = "/api/v1/entity/infrastructure/hosts" + class TestHostGroupFunctions(unittest.TestCase): - RESPONSE_DIR = "tests/mockserver_payloads/responses/host_groups/" - def test_get_host_groups_tenantwide(self): - parameters = { - "relativeTime": ["day"], - "includeDetails": [ "true" ], - "Api-Token": [CLUSTER["api_token"][TENANT]], - } - mockserver_response_file = f"{self.RESPONSE_DIR}mock_get_general_1.json" - tooling_for_test.create_mockserver_expectation( - CLUSTER, TENANT, URL_PATH, "GET", parameters=parameters, response_file=mockserver_response_file) - command_tested = host_groups.get_host_groups_tenantwide(CLUSTER, TENANT) + RESPONSE_DIR = "tests/mockserver_payloads/responses/host_groups/" + + def test_get_host_groups_tenantwide(self): + parameters = { + "relativeTime": ["day"], + "includeDetails": ["true"], + "Api-Token": [CLUSTER["api_token"][TENANT]], + } + mockserver_response_file = f"{self.RESPONSE_DIR}mock_get_general_1.json" + tooling_for_test.create_mockserver_expectation( + CLUSTER, TENANT, URL_PATH, "GET", parameters=parameters, response_file=mockserver_response_file) + command_tested = host_groups.get_host_groups_tenantwide( + CLUSTER, TENANT) + + expected_result = { + 'HOST_GROUP-ABCDEFGH12345678': 'HOST_GROUP_1' + } + self.assertEqual(command_tested, expected_result) - expected_result = { - 'HOST_GROUP-ABCDEFGH12345678': 'HOST_GROUP_1' - } - self.assertEqual(command_tested, expected_result) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tests/test_topology_hosts.py b/tests/test_topology_hosts.py new file mode 100644 index 0000000..f990c27 --- /dev/null +++ b/tests/test_topology_hosts.py @@ -0,0 +1,138 @@ +""" +Test Suite for Topology Hosts +""" +import unittest +from user_variables import FULL_SET +from tests import tooling_for_test as testtools +from dynatrace.requests.request_handler import TenantAPIs +from dynatrace.tenant.topology import hosts + +cluster = FULL_SET.get('mock_cluster') +tenant = 'mock_tenant' +url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/hosts" +request_dir = "tests/mockserver_payloads/requests/hosts" +response_dir = "tests/mockserver_payloads/responses/hosts" + + +class TestGetHosts(unittest.TestCase): + """ + Tests cases for fetching topology hosts. 
+ """ + + def test_get_all_hosts(self): + """ + Test fetching all hosts + """ + response_file = f"{response_dir}/get_all.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = hosts.get_hosts_tenantwide(cluster, tenant) + self.assertEqual(result, testtools.expected_payload(response_file)) + + def test_get_single_host(self): + """ + Test fetching a specific host + """ + host_id = "HOST-9F74450267BAAE20" + response_file = f"{response_dir}/get_single.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=f"{url}/{host_id}", + request_type="GET", + response_file=response_file + ) + + result = hosts.get_host(cluster, tenant, host_id) + self.assertEqual(result, testtools.expected_payload(response_file)) + + def test_get_host_count(self): + """ + Test getting the count of hosts in a tenant. + """ + response_file = f"{response_dir}/get_all.json" + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file, + parameters=dict(relativeTime=['day'], + includeDetails=['False']) + ) + + result = hosts.get_host_count_tenantwide(cluster, tenant) + self.assertEqual(result, 14) + + def test_get_host_units(self): + """ + Tests getting the consumed host units in a tenant. + """ + response_file = f"{response_dir}/get_all.json" + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = hosts.get_host_units_tenantwide(cluster, tenant) + self.assertEqual(result, 4.5) + + hosts.set_host_properties + + +class TestHostTagging(unittest.TestCase): + """ + Test cases for testing host-level tagging. + """ + def test_add_tags(self): + """ + Test adding two tags to a specific host. + """ + host_id = "HOST-9F74450267BAAE20" + request_file = f"{request_dir}/tags.json" + tags = ["demo", "example"] + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + request_type="POST", + url_path=f"{url}/{host_id}", + request_file=request_file, + response_code=201 + ) + + result = hosts.add_host_tags(cluster, tenant, host_id, tags) + self.assertEqual(result, 201) + + def test_delete_tags(self): + """ + Test deleting a tag from a specific host. 
+ """ + host_id = "HOST-9F74450267BAAE20" + tag = "demo" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=f"{url}/{host_id}/tags/{tag}", + request_type="DELETE", + response_code=204 + ) + + result = hosts.delete_host_tag(cluster, tenant, host_id, tag) + self.assertEqual(204, result.status_code) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/tooling_for_test.py b/tests/tooling_for_test.py index cacfb36..1fe5ebf 100644 --- a/tests/tooling_for_test.py +++ b/tests/tooling_for_test.py @@ -4,69 +4,74 @@ import logging from dynatrace.requests.request_handler import generate_tenant_url -logging.basicConfig(filename="testing_tools.log",level=logging.DEBUG) +logging.basicConfig(filename="testing_tools.log", level=logging.DEBUG) + def create_mockserver_expectation(cluster, tenant, url_path, request_type, **kwargs): - requests.packages.urllib3.disable_warnings() - expectation = { - "httpRequest": { - "queryStringParameters": { - "Api-Token": ["sample_api_token"] # TODO Change this Hard Code - }, - }, - "httpResponse": { - "statusCode": 200 - }, - "times": { - "remainingTimes": 1, - "unlimited": False - }, - "id": "OneOff", - } + requests.packages.urllib3.disable_warnings() + expectation = { + "httpRequest": { + "queryStringParameters": { + "Api-Token": [cluster.get('api_token').get(tenant)] + }, + "path": url_path, + "method": request_type + }, + "httpResponse": { + "statusCode": 200 + }, + "times": { + "remainingTimes": 1, + "unlimited": False + }, + "id": "OneOff", + } - logging.debug(f"KWARGS {kwargs}") - # Paramaters should always at least have Api-Token - if 'parameters' in kwargs: - expectation["httpRequest"]["queryStringParameters"] = kwargs['parameters'] + logging.debug(f"KWARGS {kwargs}") + # Paramaters should always at least have Api-Token + if 'parameters' in kwargs: + expectation["httpRequest"]["queryStringParameters"].update(kwargs['parameters']) - if "request_file" in kwargs: - with open(kwargs['request_file']) as f: - request_payload = json.load(f) - expectation["httpRequest"]["body"] = { - "type": "JSON", - "json": request_payload, - } + if "request_file" in kwargs: + with open(kwargs['request_file']) as f: + request_payload = json.load(f) + expectation["httpRequest"]["body"] = { + "type": "JSON", + "json": request_payload, + } - if "response_file" in kwargs: - with open(kwargs['response_file']) as f: - response_payload = json.load(f) - expectation["httpResponse"]["body"] = { - "type": "JSON", - "json": response_payload, - } - expectation["httpResponse"]["headers"] = { - "content-type": ["application/json"] - } + if "response_file" in kwargs: + with open(kwargs['response_file']) as f: + response_payload = json.load(f) + expectation["httpResponse"]["body"] = { + "type": "JSON", + "json": response_payload, + } + expectation["httpResponse"]["headers"] = { + "content-type": ["application/json"] + } - if "mock_id" in kwargs: - expectation["id"] = kwargs["mock_id"] + if "response_code" in kwargs: + expectation["httpResponse"]["statusCode"] = kwargs["response_code"] + if "mock_id" in kwargs: + expectation["id"] = kwargs["mock_id"] - logging.debug(expectation) + logging.debug(expectation) - expectation_url = f"{generate_tenant_url(cluster, tenant)}/mockserver/expectation" - test_req = requests.request( - "PUT", - expectation_url, - json=expectation, - verify=False - ) - logging.debug(test_req.text) - if test_req.status_code > 300: - print(expectation, test_req.status_code, test_req.text, end="\n") - raise 
ValueError(test_req.status_code) + expectation_url = f"https://{cluster.get('url')}/mockserver/expectation" + test_req = requests.request( + "PUT", + expectation_url, + json=expectation, + verify=False + ) + logging.debug(test_req.text) + if test_req.status_code > 300: + print(expectation, test_req.status_code, test_req.text, end="\n") + raise ValueError(test_req.status_code) def expected_payload(json_file): - with open(json_file) as f: - return json.load(f) + with open(json_file) as f: + return json.load(f) From 9735c41d52ee353fcddb62c13d9813024c8e942d Mon Sep 17 00:00:00 2001 From: Radu Date: Wed, 5 Aug 2020 20:31:02 +0100 Subject: [PATCH 39/79] added venv for virtual environments --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 7e2c7e5..aa33aa4 100644 --- a/.gitignore +++ b/.gitignore @@ -6,7 +6,8 @@ variable_sets/* scripts/* templates/* -.vscode/ +**.vscode** +**venv** user_variables.py sandbox_script.py From 2d364452f4d0604de7d5289cdba8d1f840622d60 Mon Sep 17 00:00:00 2001 From: Radu Date: Wed, 5 Aug 2020 20:32:27 +0100 Subject: [PATCH 40/79] 4 space formatting; added missing check so param isn't passed as None --- dynatrace/exceptions.py | 14 ++++++++++---- dynatrace/tenant/topology/shared.py | 17 +++++++++++++++++ 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/dynatrace/exceptions.py b/dynatrace/exceptions.py index b92a074..41c5abf 100644 --- a/dynatrace/exceptions.py +++ b/dynatrace/exceptions.py @@ -3,20 +3,26 @@ ''' from sys import stderr + class InvalidAPIResponseException (Exception): - def __init__ (self, message): + def __init__(self, message): print(message, file=stderr) + class InvalidDateFormatException(ValueError): def __init__(self, required_format): self.required_format = required_format - print("Incorrect Date for following entry: %s", required_format, file=stderr) + print("Incorrect Date for following entry: %s", + required_format, file=stderr) + class InvalidScopeException(ValueError): def __init__(self, required_format): self.required_format = required_format - print("Invalid scope used. Tag required for management zone, matching rule: %s", required_format, file=stderr) + print("Invalid scope used. 
Tag required for management zone, matching rule: %s", + required_format, file=stderr) + class ManagedClusterOnlyException(TypeError): def __init__(self): - print ("This operation is only supported on Dynatrace Managed!", file=stderr) + print("This operation is only supported on Dynatrace Managed!", file=stderr) diff --git a/dynatrace/tenant/topology/shared.py b/dynatrace/tenant/topology/shared.py index be242d9..32f35ef 100644 --- a/dynatrace/tenant/topology/shared.py +++ b/dynatrace/tenant/topology/shared.py @@ -30,6 +30,10 @@ def get_env_layer_entities(cluster, tenant, layer, params=None): layer_list = ['applications', 'hosts', 'processes', 'process-groups', 'services'] check_valid_layer(layer, layer_list) + + if not params: + params = {} + response = rh.make_api_call( cluster=cluster, tenant=tenant, @@ -44,6 +48,10 @@ def get_env_layer_entity(cluster, tenant, layer, entity, params=None): layer_list = ['applications', 'hosts', 'processes', 'process-groups', 'services'] check_valid_layer(layer, layer_list) + + if not params: + params = {} + response = rh.make_api_call( cluster=cluster, tenant=tenant, @@ -70,6 +78,8 @@ def set_env_layer_properties(cluster, tenant, layer, entity, prop_json): def get_env_layer_count(cluster, tenant, layer, params=None): """Get total hosts in an environment""" + if not params: + params = {} layer_list = ['applications', 'hosts', 'processes', 'process-groups', 'services'] @@ -90,6 +100,10 @@ def get_env_layer_count(cluster, tenant, layer, params=None): def get_cluster_layer_count(cluster, layer, params=None): """Get total count for all environments in cluster""" + + if not params: + params = {} + cluster_layer_count = 0 for env_key in cluster['tenant']: cluster_layer_count += get_env_layer_count(cluster=cluster, @@ -101,6 +115,9 @@ def get_cluster_layer_count(cluster, layer, params=None): def get_set_layer_count(full_set, layer, params=None): """Get total count for all clusters definied in variable file""" + if not params: + params = {} + full_set_layer_count = 0 for cluster in full_set.values(): full_set_layer_count += get_cluster_layer_count(cluster, From 84906a68cdde02d8056448e626535b0b4b65aa72 Mon Sep 17 00:00:00 2001 From: Radu Date: Wed, 5 Aug 2020 22:04:15 +0100 Subject: [PATCH 41/79] PAF-23 - Testing for Topology > Processes --- .../responses/processes/get_all_pgis.json | 669 ++++++++++++++++++ .../responses/processes/get_one_pgi.json | 37 + tests/test_processes.py | 51 ++ 3 files changed, 757 insertions(+) create mode 100644 tests/mockserver_payloads/responses/processes/get_all_pgis.json create mode 100644 tests/mockserver_payloads/responses/processes/get_one_pgi.json create mode 100644 tests/test_processes.py diff --git a/tests/mockserver_payloads/responses/processes/get_all_pgis.json b/tests/mockserver_payloads/responses/processes/get_all_pgis.json new file mode 100644 index 0000000..81d097d --- /dev/null +++ b/tests/mockserver_payloads/responses/processes/get_all_pgis.json @@ -0,0 +1,669 @@ +[ + { + "entityId": "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE", + "displayName": "Code*Service.exe", + "discoveredName": "Code*Service.exe", + "firstSeenTimestamp": 1595746260000, + "lastSeenTimestamp": 1596659400000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-859E1549052CD876" + ], + "isNetworkClientOf": [ + "PROCESS_GROUP_INSTANCE-402BEC123CA7FA83" + ] + }, + "toRelationships": { + "runsOnProcessGroupInstance": [ + "SERVICE-C12BF59DA3B51679", + "SERVICE-B71ADA892013D156" + ] + }, 
+ "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Code42\\Code42Service.exe" + ], + "executables": [ + "Code*Service.exe" + ], + "executablePaths": [ + "C:/Program Files/Code*/Code*Service.exe", + "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" + ] + }, + "softwareTechnologies": [ + { + "type": "APACHE_HTTP_CLIENT_SYNC", + "edition": null, + "version": "4.5.2" + }, + { + "type": "SQLITE", + "edition": null, + "version": null + }, + { + "type": "JETTY", + "edition": null, + "version": "9.4.27.v20200227" + }, + { + "type": "JAVA", + "edition": "OpenJDK", + "version": "11.0.4" + } + ], + "listenPorts": [ + 4244 + ], + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "ON", + "expectedMonitoringState": "ON", + "restartRequired": true + }, + "agentVersions": [ + { + "major": 1, + "minor": 199, + "revision": 28, + "timestamp": "20200723-141750", + "sourceRevision": "" + } + ] + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-A6AAFEA17E6F60FD", + "displayName": "Code.exe", + "discoveredName": "Code.exe", + "firstSeenTimestamp": 1593608520000, + "lastSeenTimestamp": 1596564450419, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-19DACA5E22637C33" + ] + }, + "toRelationships": {}, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Microsoft\\ VS\\ Code\\Code.exe -n" + ], + "executables": [ + "Code.exe" + ], + "executablePaths": [ + "C:/Program Files/Microsoft VS Code/Code.exe" + ] + }, + "softwareTechnologies": [ + { + "type": "CLR", + "edition": "FullCLR", + "version": "4.8.4180.0" + }, + { + "type": "DOTNET", + "edition": ".NET Framework", + "version": "4.8.4180.0" + } + ], + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "ON", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-F0967E6BFEE20424", + "displayName": "CodeHelper.exe", + "discoveredName": "CodeHelper.exe", + "firstSeenTimestamp": 1593608520000, + "lastSeenTimestamp": 1596659400000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-2DADD604A183AF95" + ] + }, + "toRelationships": {}, + "metadata": { + "commandLineArgs": [ + ], + "executables": [ + "CodeHelper.exe" + ], + "executablePaths": [ + ] + }, + "softwareTechnologies": [ + { + "type": "CLR", + "edition": "FullCLR", + "version": "4.8.4180.0" + }, + { + "type": "DOTNET", + "edition": ".NET Framework", + "version": "4.8.4180.0" + } + ], + "bitness": "32bit", + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "OFF", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-C7BEFD2A8F523A60", + "displayName": "ConnectivityDiagnosis.exe", + "discoveredName": "ConnectivityDiagnosis.exe", + "firstSeenTimestamp": 1593679650232, + "lastSeenTimestamp": 1596627719999, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-FA40BCF5F71DC6D8" + ] + }, + "toRelationships": {}, + "metadata": { + "commandLineArgs": [ + "C:\\WINDOWS\\CCM\\ConnectivityDiagnosis.exe" + ], + "executables": [ + "ConnectivityDiagnosis.exe" + ], + "executablePaths": [ + "C:/WINDOWS/CCM/ConnectivityDiagnosis.exe", + "C:\\WINDOWS\\CCM\\CONNECTIVITYDIAGNOSIS.EXE" + ] + }, + "softwareTechnologies": [ + { + "type": "CLR", + "edition": "FullCLR", + "version": "4.8.4180.0" + }, + { + "type": "DOTNET", + "edition": ".NET 
Framework", + "version": "4.8.4180.0" + } + ], + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "ON", + "restartRequired": true + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-A5B3318E9D88975D", + "displayName": "DellCommandUpdate.exe", + "discoveredName": "DellCommandUpdate.exe", + "firstSeenTimestamp": 1594030620000, + "lastSeenTimestamp": 1596659400000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-30E1CF56034D1657" + ] + }, + "toRelationships": {}, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\ (x86)\\Dell\\CommandUpdate\\DellCommandUpdate.exe NewUpdatesReadyToApply" + ], + "executables": [ + "DellCommandUpdate.exe" + ], + "executablePaths": [ + "C:/Program Files (x*)/Dell/CommandUpdate/DellCommandUpdate.exe", + "C:\\PROGRAM FILES (X86)\\DELL\\COMMANDUPDATE\\DELLCOMMANDUPDATE.EXE" + ] + }, + "softwareTechnologies": [ + { + "type": "CLR", + "edition": "FullCLR", + "version": "4.8.4180.0" + }, + { + "type": "DOTNET", + "edition": ".NET Framework", + "version": "4.8.4180.0" + } + ], + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "OFF", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-F131DA558F8051F9", + "displayName": "Docker Desktop Installer.exe", + "discoveredName": "Docker Desktop Installer.exe", + "firstSeenTimestamp": 1596565440000, + "lastSeenTimestamp": 1596570839999, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-C340A62CDF763878" + ] + }, + "toRelationships": {}, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Docker\\Docker\\Docker\\ Desktop\\ Installer.exe check-for-update" + ], + "executables": [ + "Docker Desktop Installer.exe" + ], + "executablePaths": [ + "C:/Program Files/Docker/Docker/Docker Desktop Installer.exe", + "C:\\PROGRAM FILES\\DOCKER\\DOCKER\\DOCKER DESKTOP INSTALLER.EXE" + ] + }, + "softwareTechnologies": [ + { + "type": "CLR", + "edition": "FullCLR", + "version": "4.8.4180.0" + }, + { + "type": "DOTNET", + "edition": ".NET Framework", + "version": "4.8.4180.0" + } + ], + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "ON", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-8BBC7F9C695E6480", + "displayName": "Docker Desktop.exe", + "discoveredName": "Docker Desktop.exe", + "firstSeenTimestamp": 1596565439999, + "lastSeenTimestamp": 1596565439999, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-1688581C565DEABE" + ] + }, + "toRelationships": {}, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Docker\\Docker\\Docker\\ Desktop.exe" + ], + "executables": [ + "Docker Desktop.exe" + ], + "executablePaths": [ + "C:/Program Files/Docker/Docker/Docker Desktop.exe", + "C:\\PROGRAM FILES\\DOCKER\\DOCKER\\DOCKER DESKTOP.EXE" + ] + }, + "softwareTechnologies": [ + { + "type": "CLR", + "edition": "FullCLR", + "version": "4.8.4180.0" + }, + { + "type": "DOTNET", + "edition": ".NET Framework", + "version": "4.8.4180.0" + } + ], + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "ON", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-402BEC123CA7FA83", + "displayName": "Dynatrace 
ActiveGate", + "discoveredName": "Dynatrace ActiveGate", + "firstSeenTimestamp": 1594791720000, + "lastSeenTimestamp": 1596464730209, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-1BB99004B1F22491" + ], + "isInstanceOf": [ + "PROCESS_GROUP-5B927C168D55DE12" + ] + }, + "toRelationships": { + "isNetworkClientOf": [ + "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE", + "PROCESS_GROUP_INSTANCE-BBA4AAF390993A8D", + "PROCESS_GROUP_INSTANCE-E42674B38558C0CF", + "PROCESS_GROUP_INSTANCE-E8BD469415E3A0AE", + "PROCESS_GROUP_INSTANCE-31B7F45EF1965AD7", + "PROCESS_GROUP_INSTANCE-0257C724F074BDCE", + "PROCESS_GROUP_INSTANCE-60C0DE2F26F0E308", + "PROCESS_GROUP_INSTANCE-7F1EFFDC6C1F29CB", + "PROCESS_GROUP_INSTANCE-1FE3191C7C750153", + "PROCESS_GROUP_INSTANCE-C063F464B1BC2B07" + ] + }, + "metadata": { + "commandLineArgs": [ + "/opt/dynatrace/gateway/jre/bin/java -Dcom.compuware.apm.WatchDogPort=50006 -classpath /opt/dynatrace/gateway/lib/* -Xms1024M -XX:ErrorFile=/var/log/dynatrace/gateway/hs_err_pid_%p.log -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=60 -Duser.language=en -Dcom.compuware.apm.debug.webserver=true -Djava.util.logging.manager=com.compuware.apm.logging.impl.backend.CustomShutdownLogManager -Djdk.tls.ephemeralDHKeySize=2048 -ea -Dorg.xerial.snappy.lib.path=/opt/dynatrace/gateway/lib/native -Dorg.xerial.snappy.lib.name=libsnappyjava.so -Dcom.compuware.apm.WatchDogTimeout=180 com.compuware.apm.collector.core.CollectorImpl -CONFIG_DIR /var/lib/dynatrace/gateway/config" + ], + "hostGroups": [ + "linux_plugins" + ], + "executables": [ + "java" + ], + "javaMainClasses": [ + "com.compuware.apm.collector.core.CollectorImpl" + ], + "executablePaths": [ + "/opt/dynatrace/gateway/jre/bin/java" + ] + }, + "softwareTechnologies": [ + { + "type": "APMNG", + "edition": "OpenJDK", + "version": "1.8.0_252" + } + ], + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "OFF", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-1AA2930908C82AF5", + "displayName": "GfxDownloadWrapper.exe", + "discoveredName": "GfxDownloadWrapper.exe", + "firstSeenTimestamp": 1594743060000, + "lastSeenTimestamp": 1596627660000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-E83BBCE6EA416069" + ] + }, + "toRelationships": {}, + "metadata": { + "commandLineArgs": [ + ], + "executables": [ + "GfxDownloadWrapper.exe" + ], + "executablePaths": [ + ] + }, + "softwareTechnologies": [ + { + "type": "CLR", + "edition": "FullCLR", + "version": "4.8.4180.0" + }, + { + "type": "DOTNET", + "edition": ".NET Framework", + "version": "4.8.4180.0" + } + ], + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "ON", + "restartRequired": true + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-792BE86953F73281", + "displayName": "GlobalProtect service", + "discoveredName": "GlobalProtect service", + "firstSeenTimestamp": 1593603300000, + "lastSeenTimestamp": 1596659400000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-19A66884085CCFE1" + ] + }, + "toRelationships": {}, + "metadata": { + "executables": [ + "PanGPS.exe" + ], + "executablePaths": [ + "C:\\PROGRAM FILES\\PALO ALTO NETWORKS\\GLOBALPROTECT\\PANGPS.EXE" + ] + }, + "listenPorts": [ + 4767 + ], + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "ON", + 
"expectedMonitoringState": "OFF", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-979851F08FB75636", + "displayName": "Google Chrome", + "discoveredName": "Google Chrome", + "firstSeenTimestamp": 1593605280000, + "lastSeenTimestamp": 1596659400000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-1DB7168BDCE9CE4B" + ], + "isNetworkClientOf": [ + "PROCESS_GROUP_INSTANCE-D18F46A3E5EEF8C7", + "PROCESS_GROUP_INSTANCE-148467FDC40B7504" + ] + }, + "toRelationships": {}, + "metadata": { + "executables": [ + "chrome.exe" + ], + "executablePaths": [ + "C:\\PROGRAM FILES (X86)\\GOOGLE\\CHROME\\APPLICATION\\CHROME.EXE" + ] + }, + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "ON", + "expectedMonitoringState": "OFF", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-2D90C092D03BD6B9", + "displayName": "Host Process for Microsoft Configuration Manager", + "discoveredName": "Host Process for Microsoft Configuration Manager", + "firstSeenTimestamp": 1593610860000, + "lastSeenTimestamp": 1596636960000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-5D9A038AD1100883" + ] + }, + "toRelationships": {}, + "metadata": { + "executables": [ + "CcmExec.exe" + ], + "executablePaths": [ + "C:\\WINDOWS\\CCM\\CCMEXEC.EXE" + ] + }, + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "ON", + "expectedMonitoringState": "OFF", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-46AE3D05C7012909", + "displayName": "Insomnia", + "discoveredName": "Insomnia", + "firstSeenTimestamp": 1596656520000, + "lastSeenTimestamp": 1596656640000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-C400D5D1212102C4" + ] + }, + "toRelationships": {}, + "metadata": { + "executables": [ + "Insomnia.exe" + ], + "executablePaths": [ + "C:\\USERS\\RADU.STEFAN\\APPDATA\\LOCAL\\INSOMNIA\\APP-2020.3.3\\INSOMNIA.EXE" + ] + }, + "bitness": "64bit", + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "OFF", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-AD53BA0E5F9FB61D", + "displayName": "Linux System", + "discoveredName": "Linux System", + "firstSeenTimestamp": 1596279540000, + "lastSeenTimestamp": 1596659640000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-FFC515D2848C762E" + ], + "isInstanceOf": [ + "PROCESS_GROUP-EF1E81586EB8EDD6" + ] + }, + "toRelationships": {}, + "metadata": { + "executables": [ + "kthreadd" + ], + "executablePaths": [ + "kthreadd" + ] + }, + "softwareTechnologies": [ + { + "type": "LINUX_SYSTEM", + "edition": null, + "version": null + } + ], + "monitoringState": { + "actualMonitoringState": "ON", + "expectedMonitoringState": "OFF", + "restartRequired": false + } + }, + { + "entityId": "PROCESS_GROUP_INSTANCE-9CD5F00A9D5B1DD7", + "displayName": "Linux System", + "discoveredName": "Linux System", + "firstSeenTimestamp": 1594791720000, + "lastSeenTimestamp": 1596485100000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-1BB99004B1F22491" + ], + "isInstanceOf": [ + "PROCESS_GROUP-C4E413AE6C59611D" + ] + }, + "toRelationships": {}, + "metadata": { + "hostGroups": [ + "linux_plugins" + ], + "executables": [ + "kthreadd" + ], + "executablePaths": [ + "kthreadd" + ] + }, + 
"softwareTechnologies": [ + { + "type": "LINUX_SYSTEM", + "edition": null, + "version": null + } + ], + "monitoringState": { + "actualMonitoringState": "OFF", + "expectedMonitoringState": "OFF", + "restartRequired": false + } + } + ] \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/processes/get_one_pgi.json b/tests/mockserver_payloads/responses/processes/get_one_pgi.json new file mode 100644 index 0000000..1eff28d --- /dev/null +++ b/tests/mockserver_payloads/responses/processes/get_one_pgi.json @@ -0,0 +1,37 @@ +{ + "entityId": "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE", + "displayName": "Code*Service.exe", + "discoveredName": "Code*Service.exe", + "firstSeenTimestamp": 1595746260000, + "lastSeenTimestamp": 1596659400000, + "tags": [], + "fromRelationships": { + "isProcessOf": [ + "HOST-80AA2D475F709672" + ], + "isInstanceOf": [ + "PROCESS_GROUP-859E1549052CD876" + ], + "isNetworkClientOf": [ + "PROCESS_GROUP_INSTANCE-402BEC123CA7FA83" + ] + }, + "toRelationships": { + "runsOnProcessGroupInstance": [ + "SERVICE-C12BF59DA3B51679", + "SERVICE-B71ADA892013D156" + ] + }, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Code42\\Code42Service.exe" + ], + "executables": [ + "Code*Service.exe" + ], + "executablePaths": [ + "C:/Program Files/Code*/Code*Service.exe", + "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" + ] + } +} \ No newline at end of file diff --git a/tests/test_processes.py b/tests/test_processes.py new file mode 100644 index 0000000..ec82f6f --- /dev/null +++ b/tests/test_processes.py @@ -0,0 +1,51 @@ +"""Test suite for Topology Processes""" + +import unittest +from user_variables import FULL_SET +from tests import tooling_for_test as testtools +from dynatrace.requests.request_handler import TenantAPIs +from dynatrace.tenant.topology import process + +cluster = FULL_SET.get('mock_cluster') +tenant = 'mock_tenant' +url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/processes" +request_dir = "tests/mockserver_payloads/requests/processes" +response_dir = "tests/mockserver_payloads/responses/processes" + + +class TestGetProcesses(unittest.TestCase): + """Test cases for fetching topology processes.""" + + def test_get_all_processes(self): + """Test getting all processes tenantwide.""" + response_file = f"{response_dir}/get_all_pgis.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = process.get_processes_tenantwide(cluster, tenant) + self.assertEqual(result, testtools.expected_payload(response_file)) + + def test_get_single_process(self): + """Tests getting one specific process.""" + response_file = f"{response_dir}/get_one_pgi.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = process.get_processes_tenantwide(cluster, tenant) + self.assertEqual(result, testtools.expected_payload(response_file)) + + +if __name__ == '__main__': + unittest.main() From 824dc130b01eb8642e8edf51bcdc1ef2582e63c2 Mon Sep 17 00:00:00 2001 From: Radu Date: Wed, 5 Aug 2020 22:06:36 +0100 Subject: [PATCH 42/79] changes from PAF-22 that should have been pushed to dev instead --- tests/tooling_for_test.py | 111 +++++++++++++++++++------------------- 1 file changed, 57 insertions(+), 54 deletions(-) diff --git a/tests/tooling_for_test.py b/tests/tooling_for_test.py index cacfb36..bc612e2 100644 --- a/tests/tooling_for_test.py +++ 
b/tests/tooling_for_test.py @@ -4,69 +4,72 @@ import logging from dynatrace.requests.request_handler import generate_tenant_url -logging.basicConfig(filename="testing_tools.log",level=logging.DEBUG) +logging.basicConfig(filename="testing_tools.log", level=logging.DEBUG) + def create_mockserver_expectation(cluster, tenant, url_path, request_type, **kwargs): - requests.packages.urllib3.disable_warnings() - expectation = { - "httpRequest": { - "queryStringParameters": { - "Api-Token": ["sample_api_token"] # TODO Change this Hard Code - }, - }, - "httpResponse": { - "statusCode": 200 - }, - "times": { - "remainingTimes": 1, - "unlimited": False - }, - "id": "OneOff", - } + requests.packages.urllib3.disable_warnings() + expectation = { + "httpRequest": { + "queryStringParameters": { + "Api-Token": [cluster.get('api_token').get(tenant)] + }, + }, + "httpResponse": { + "statusCode": 200 + }, + "times": { + "remainingTimes": 1, + "unlimited": False + }, + "id": "OneOff", + } - logging.debug(f"KWARGS {kwargs}") - # Paramaters should always at least have Api-Token - if 'parameters' in kwargs: - expectation["httpRequest"]["queryStringParameters"] = kwargs['parameters'] + logging.debug(f"KWARGS {kwargs}") + # Parameters should always at least have Api-Token + if 'parameters' in kwargs: + expectation["httpRequest"]["queryStringParameters"] = kwargs['parameters'] - if "request_file" in kwargs: - with open(kwargs['request_file']) as f: - request_payload = json.load(f) - expectation["httpRequest"]["body"] = { - "type": "JSON", - "json": request_payload, - } + if "request_file" in kwargs: + with open(kwargs['request_file']) as f: + request_payload = json.load(f) + expectation["httpRequest"]["body"] = { + "type": "JSON", + "json": request_payload, + } - if "response_file" in kwargs: - with open(kwargs['response_file']) as f: - response_payload = json.load(f) - expectation["httpResponse"]["body"] = { - "type": "JSON", - "json": response_payload, - } - expectation["httpResponse"]["headers"] = { - "content-type": ["application/json"] - } + if "response_file" in kwargs: + with open(kwargs['response_file']) as f: + response_payload = json.load(f) + expectation["httpResponse"]["body"] = { + "type": "JSON", + "json": response_payload, + } + expectation["httpResponse"]["headers"] = { + "content-type": ["application/json"] + } - if "mock_id" in kwargs: - expectation["id"] = kwargs["mock_id"] + if "response_code" in kwargs: + expectation['httpResponse']['statusCode'] = kwargs['response_code'] + if "mock_id" in kwargs: + expectation["id"] = kwargs["mock_id"] - logging.debug(expectation) + logging.debug(expectation) - expectation_url = f"{generate_tenant_url(cluster, tenant)}/mockserver/expectation" - test_req = requests.request( - "PUT", - expectation_url, - json=expectation, - verify=False - ) - logging.debug(test_req.text) - if test_req.status_code > 300: - print(expectation, test_req.status_code, test_req.text, end="\n") - raise ValueError(test_req.status_code) + expectation_url = f"{generate_tenant_url(cluster, tenant)}/mockserver/expectation" + test_req = requests.request( + "PUT", + expectation_url, + json=expectation, + verify=False + ) + logging.debug(test_req.text) + if test_req.status_code > 300: + print(expectation, test_req.status_code, test_req.text, end="\n") + raise ValueError(test_req.status_code) def expected_payload(json_file): - with open(json_file) as f: - return json.load(f) + with open(json_file) as f: + return json.load(f) From 42db44fdbc878908ca7d92e2f250caa4abbf8966 Mon Sep 17 00:00:00 2001
From: Radu Date: Wed, 5 Aug 2020 22:30:18 +0100 Subject: [PATCH 43/79] Update test_processes.py --- tests/test_processes.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_processes.py b/tests/test_processes.py index ec82f6f..c212a96 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -34,6 +34,7 @@ def test_get_all_processes(self): def test_get_single_process(self): """Tests getting one specific process.""" response_file = f"{response_dir}/get_one_pgi.json" + process_id = "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" testtools.create_mockserver_expectation( cluster=cluster, @@ -43,7 +44,7 @@ def test_get_single_process(self): response_file=response_file ) - result = process.get_processes_tenantwide(cluster, tenant) + result = process.get_process(cluster, tenant, process_id) self.assertEqual(result, testtools.expected_payload(response_file)) From bde9a78c4dd36f8077b10d0a9b0eb307d52903fa Mon Sep 17 00:00:00 2001 From: Radu Date: Wed, 5 Aug 2020 22:31:22 +0100 Subject: [PATCH 44/79] Update test_processes.py --- tests/test_processes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_processes.py b/tests/test_processes.py index c212a96..d78a4f2 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -39,7 +39,7 @@ def test_get_single_process(self): testtools.create_mockserver_expectation( cluster=cluster, tenant=tenant, - url_path=url, + url_path=f"{url}/{process_id}", request_type="GET", response_file=response_file ) From 07d3ce5eecff9a47ca7508ff5aad6b1b63bc01d1 Mon Sep 17 00:00:00 2001 From: Radu Date: Wed, 5 Aug 2020 22:44:30 +0100 Subject: [PATCH 45/79] PAF-24 Testing for Topology > Process Groups --- .../responses/processes/get_all_pgs.json | 208 ++++++++++++++++++ .../responses/processes/get_one_pg.json | 62 ++++++ .../responses/processes/tags.json | 6 + tests/test_process_groups.py | 91 ++++++++ 4 files changed, 367 insertions(+) create mode 100644 tests/mockserver_payloads/responses/processes/get_all_pgs.json create mode 100644 tests/mockserver_payloads/responses/processes/get_one_pg.json create mode 100644 tests/mockserver_payloads/responses/processes/tags.json create mode 100644 tests/test_process_groups.py diff --git a/tests/mockserver_payloads/responses/processes/get_all_pgs.json b/tests/mockserver_payloads/responses/processes/get_all_pgs.json new file mode 100644 index 0000000..12ad4cc --- /dev/null +++ b/tests/mockserver_payloads/responses/processes/get_all_pgs.json @@ -0,0 +1,208 @@ +[ + { + "entityId": "PROCESS_GROUP-859E1549052CD876", + "displayName": "Code*Service.exe", + "discoveredName": "Code*Service.exe", + "firstSeenTimestamp": 1595746300858, + "lastSeenTimestamp": 1596661825512, + "tags": [], + "fromRelationships": { + "isNetworkClientOfProcessGroup": [ + "PROCESS_GROUP-5B927C168D55DE12" + ], + "runsOn": [ + "HOST-80AA2D475F709672" + ] + }, + "toRelationships": { + "isInstanceOf": [ + "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" + ], + "runsOn": [ + "SERVICE-C12BF59DA3B51679", + "SERVICE-B71ADA892013D156" + ] + }, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Code42\\Code42Service.exe" + ], + "executables": [ + "Code*Service.exe" + ], + "executablePaths": [ + "C:/Program Files/Code*/Code*Service.exe", + "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" + ] + }, + "softwareTechnologies": [ + { + "type": "APACHE_HTTP_CLIENT_SYNC", + "edition": null, + "version": "4.5.2" + }, + { + "type": "JETTY", + "edition": null, + "version": "9.4.27.v20200227" + }, + { + "type": "JAVA", 
+ "edition": "OpenJDK", + "version": "11.0.4" + }, + { + "type": "SQLITE", + "edition": null, + "version": null + } + ], + "listenPorts": [ + 4244 + ] + }, + { + "entityId": "PROCESS_GROUP-19DACA5E22637C33", + "displayName": "Code.exe", + "discoveredName": "Code.exe", + "firstSeenTimestamp": 1592585950992, + "lastSeenTimestamp": 1596564482129, + "tags": [], + "fromRelationships": { + "runsOn": [ + "HOST-80AA2D475F709672" + ] + }, + "toRelationships": { + "isInstanceOf": [ + "PROCESS_GROUP_INSTANCE-A6AAFEA17E6F60FD" + ] + }, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Microsoft\\ VS\\ Code\\Code.exe -n" + ], + "executables": [ + "Code.exe" + ], + "executablePaths": [ + "C:/Program Files/Microsoft VS Code/Code.exe" + ] + }, + "softwareTechnologies": [ + { + "type": "CLR", + "edition": "FullCLR", + "version": "4.8.4180.0" + }, + { + "type": "DOTNET", + "edition": ".NET Framework", + "version": "4.8.4180.0" + } + ] + }, + { + "entityId": "PROCESS_GROUP-859E1549052CD876", + "displayName": "Code*Service.exe", + "discoveredName": "Code*Service.exe", + "firstSeenTimestamp": 1595746300858, + "lastSeenTimestamp": 1596661825512, + "tags": [], + "fromRelationships": { + "isNetworkClientOfProcessGroup": [ + "PROCESS_GROUP-5B927C168D55DE12" + ], + "runsOn": [ + "HOST-80AA2D475F709672" + ] + }, + "toRelationships": { + "isInstanceOf": [ + "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" + ], + "runsOn": [ + "SERVICE-C12BF59DA3B51679", + "SERVICE-B71ADA892013D156" + ] + }, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Code42\\Code42Service.exe" + ], + "executables": [ + "Code*Service.exe" + ], + "executablePaths": [ + "C:/Program Files/Code*/Code*Service.exe", + "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" + ] + }, + "softwareTechnologies": [ + { + "type": "APACHE_HTTP_CLIENT_SYNC", + "edition": null, + "version": "4.5.2" + }, + { + "type": "JETTY", + "edition": null, + "version": "9.4.27.v20200227" + }, + { + "type": "JAVA", + "edition": "OpenJDK", + "version": "11.0.4" + }, + { + "type": "SQLITE", + "edition": null, + "version": null + } + ], + "listenPorts": [ + 4244 + ] + }, + { + "entityId": "PROCESS_GROUP-19DACA5E22637C33", + "displayName": "Code.exe", + "discoveredName": "Code.exe", + "firstSeenTimestamp": 1592585950992, + "lastSeenTimestamp": 1596564482129, + "tags": [], + "fromRelationships": { + "runsOn": [ + "HOST-80AA2D475F709672" + ] + }, + "toRelationships": { + "isInstanceOf": [ + "PROCESS_GROUP_INSTANCE-A6AAFEA17E6F60FD" + ] + }, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Microsoft\\ VS\\ Code\\Code.exe -n" + ], + "executables": [ + "Code.exe" + ], + "executablePaths": [ + "C:/Program Files/Microsoft VS Code/Code.exe" + ] + }, + "softwareTechnologies": [ + { + "type": "CLR", + "edition": "FullCLR", + "version": "4.8.4180.0" + }, + { + "type": "DOTNET", + "edition": ".NET Framework", + "version": "4.8.4180.0" + } + ] + } +] \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/processes/get_one_pg.json b/tests/mockserver_payloads/responses/processes/get_one_pg.json new file mode 100644 index 0000000..882af98 --- /dev/null +++ b/tests/mockserver_payloads/responses/processes/get_one_pg.json @@ -0,0 +1,62 @@ +{ + "entityId": "PROCESS_GROUP-859E1549052CD876", + "displayName": "Code*Service.exe", + "discoveredName": "Code*Service.exe", + "firstSeenTimestamp": 1595746300858, + "lastSeenTimestamp": 1596661825512, + "tags": [], + "fromRelationships": { + "isNetworkClientOfProcessGroup": [ + "PROCESS_GROUP-5B927C168D55DE12" + 
], + "runsOn": [ + "HOST-80AA2D475F709672" + ] + }, + "toRelationships": { + "isInstanceOf": [ + "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" + ], + "runsOn": [ + "SERVICE-C12BF59DA3B51679", + "SERVICE-B71ADA892013D156" + ] + }, + "metadata": { + "commandLineArgs": [ + "C:\\Program\\ Files\\Code42\\Code42Service.exe" + ], + "executables": [ + "Code*Service.exe" + ], + "executablePaths": [ + "C:/Program Files/Code*/Code*Service.exe", + "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" + ] + }, + "softwareTechnologies": [ + { + "type": "APACHE_HTTP_CLIENT_SYNC", + "edition": null, + "version": "4.5.2" + }, + { + "type": "JETTY", + "edition": null, + "version": "9.4.27.v20200227" + }, + { + "type": "JAVA", + "edition": "OpenJDK", + "version": "11.0.4" + }, + { + "type": "SQLITE", + "edition": null, + "version": null + } + ], + "listenPorts": [ + 4244 + ] +} \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/processes/tags.json b/tests/mockserver_payloads/responses/processes/tags.json new file mode 100644 index 0000000..dc89ff6 --- /dev/null +++ b/tests/mockserver_payloads/responses/processes/tags.json @@ -0,0 +1,6 @@ +{ + "tags": [ + "demo", + "example" + ] +} \ No newline at end of file diff --git a/tests/test_process_groups.py b/tests/test_process_groups.py new file mode 100644 index 0000000..15b17ef --- /dev/null +++ b/tests/test_process_groups.py @@ -0,0 +1,91 @@ +"""Test Suite for Topology Process Groups""" + +import unittest +from user_variables import FULL_SET +from tests import tooling_for_test as testtools +from dynatrace.requests.request_handler import TenantAPIs +from dynatrace.tenant.topology import process_groups + +cluster = FULL_SET.get('mock_cluster') +tenant = 'mock_tenant' +url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/process-groups" +request_dir = "tests/mockserver_payloads/requests/processes" +response_dir = "tests/mockserver_payloads/responses/processes" + + +class TestGetPGs(unittest.TestCase): + """Test cases for fetching topology process groups.""" + + def test_get_all_pgs(self): + """Test fetching all PGs""" + response_file = f"{response_dir}/get_all_pgs.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = process_groups.get_process_groups_tenantwide(cluster, tenant) + self.assertEqual(result, testtools.expected_payload(response_file)) + + def test_get_single_pg(self): + """Test fetching single PG""" + response_file = f"{response_dir}/get_one_pg.json" + pg_id = "PROCESS_GROUP-859E1549052CD876" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=f"{url}/{pg_id}", + request_type="GET", + response_file=response_file + ) + + result = process_groups.get_process_group(cluster, tenant, pg_id) + self.assertEqual(result, testtools.expected_payload(response_file)) + + def test_get_pg_count(self): + """Test getting the PG count tenantwide.""" + response_file = f"{response_dir}/get_all_pgs.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = process_groups.get_process_group_count_tenantwide(cluster, + tenant) + self.assertEqual(result, 4) + + +class TestPGTags(unittest.TestCase): + """Test cases for PG tags""" + + def test_add_pg_tags(self): + """Test adding two tags to the PG.""" + pg_id = "PROCESS_GROUP-859E1549052CD876" + request_file = f"{response_dir}/tags.json" + tags = ["demo", 
"example"] + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + request_type="POST", + url_path=f"{url}/{pg_id}", + request_file=request_file, + response_code=201 + ) + + result = process_groups.add_process_group_tags(cluster, tenant, + pg_id, tags) + self.assertEqual(result, 201) + + +if __name__ == '__main__': + unittest.main() From 794db69a504d75964bc854ae10f3634cab6d13be Mon Sep 17 00:00:00 2001 From: Radu Date: Wed, 5 Aug 2020 22:49:43 +0100 Subject: [PATCH 46/79] triple quote one-liners should be all on one line (PEP8 style) --- tests/test_topology_hosts.py | 39 ++++++++++++++---------------------- 1 file changed, 15 insertions(+), 24 deletions(-) diff --git a/tests/test_topology_hosts.py b/tests/test_topology_hosts.py index f990c27..540f108 100644 --- a/tests/test_topology_hosts.py +++ b/tests/test_topology_hosts.py @@ -15,14 +15,11 @@ class TestGetHosts(unittest.TestCase): - """ - Tests cases for fetching topology hosts. - """ + """Tests cases for fetching topology hosts.""" def test_get_all_hosts(self): - """ - Test fetching all hosts - """ + """Test fetching all hosts""" + response_file = f"{response_dir}/get_all.json" testtools.create_mockserver_expectation( @@ -37,9 +34,8 @@ def test_get_all_hosts(self): self.assertEqual(result, testtools.expected_payload(response_file)) def test_get_single_host(self): - """ - Test fetching a specific host - """ + """Test fetching a specific host""" + host_id = "HOST-9F74450267BAAE20" response_file = f"{response_dir}/get_single.json" @@ -55,9 +51,8 @@ def test_get_single_host(self): self.assertEqual(result, testtools.expected_payload(response_file)) def test_get_host_count(self): - """ - Test getting the count of hosts in a tenant. - """ + """Test getting the count of hosts in a tenant.""" + response_file = f"{response_dir}/get_all.json" testtools.create_mockserver_expectation( cluster=cluster, @@ -73,9 +68,8 @@ def test_get_host_count(self): self.assertEqual(result, 14) def test_get_host_units(self): - """ - Tests getting the consumed host units in a tenant. - """ + """Tests getting the consumed host units in a tenant.""" + response_file = f"{response_dir}/get_all.json" testtools.create_mockserver_expectation( cluster=cluster, @@ -92,13 +86,11 @@ def test_get_host_units(self): class TestHostTagging(unittest.TestCase): - """ - Test cases for testing host-level tagging. - """ + """Test cases for testing host-level tagging.""" + def test_add_tags(self): - """ - Test adding two tags to a specific host. - """ + """Test adding two tags to a specific host.""" + host_id = "HOST-9F74450267BAAE20" request_file = f"{request_dir}/tags.json" tags = ["demo", "example"] @@ -116,9 +108,8 @@ def test_add_tags(self): self.assertEqual(result, 201) def test_delete_tags(self): - """ - Test deleting a tag from a specific host. 
- """ + """Test deleting a tag from a specific host.""" + host_id = "HOST-9F74450267BAAE20" tag = "demo" From ca3dc81ec82d56711fa7a85ed4a9b0455b86a3c6 Mon Sep 17 00:00:00 2001 From: Radu Date: Thu, 6 Aug 2020 08:53:36 +0100 Subject: [PATCH 47/79] PAF-25 Testing for Topology > Services --- .../requests/services/tags.json | 6 + .../responses/services/get_all.json | 139 ++++++++++++++++++ .../responses/services/get_one.json | 51 +++++++ tests/test_services.py | 89 +++++++++++ 4 files changed, 285 insertions(+) create mode 100644 tests/mockserver_payloads/requests/services/tags.json create mode 100644 tests/mockserver_payloads/responses/services/get_all.json create mode 100644 tests/mockserver_payloads/responses/services/get_one.json create mode 100644 tests/test_services.py diff --git a/tests/mockserver_payloads/requests/services/tags.json b/tests/mockserver_payloads/requests/services/tags.json new file mode 100644 index 0000000..dc89ff6 --- /dev/null +++ b/tests/mockserver_payloads/requests/services/tags.json @@ -0,0 +1,6 @@ +{ + "tags": [ + "demo", + "example" + ] +} \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/services/get_all.json b/tests/mockserver_payloads/responses/services/get_all.json new file mode 100644 index 0000000..42c6e27 --- /dev/null +++ b/tests/mockserver_payloads/responses/services/get_all.json @@ -0,0 +1,139 @@ +[ + { + "entityId": "SERVICE-C12BF59DA3B51679", + "displayName": "/", + "discoveredName": "/", + "firstSeenTimestamp": 1595746255774, + "lastSeenTimestamp": 1596699715160, + "tags": [], + "fromRelationships": { + "runsOnProcessGroupInstance": [ + "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" + ], + "runsOn": [ + "PROCESS_GROUP-859E1549052CD876" + ] + }, + "toRelationships": { + "calls": [ + "SERVICE-B71ADA892013D156" + ] + }, + "agentTechnologyType": "JAVA", + "serviceTechnologyTypes": [ + "Java" + ], + "serviceType": "WebRequest", + "softwareTechnologies": [ + { + "type": "APACHE_HTTP_CLIENT_SYNC", + "edition": null, + "version": "4.5.2" + }, + { + "type": "JETTY", + "edition": null, + "version": "9.4.27.v20200227" + }, + { + "type": "JAVA", + "edition": "OpenJDK", + "version": "11.0.4" + }, + { + "type": "SQLITE", + "edition": null, + "version": null + } + ], + "webApplicationId": "/", + "webServerName": "localhost", + "contextRoot": "/" + }, + { + "entityId": "SERVICE-C096CE0BA471AEFD", + "displayName": "Netty on 0:0:0:0:0:0:0:0:*", + "discoveredName": "Netty on 0:0:0:0:0:0:0:0:*", + "firstSeenTimestamp": 1596565951607, + "lastSeenTimestamp": 1596663764465, + "tags": [], + "fromRelationships": { + "runsOnProcessGroupInstance": [ + "PROCESS_GROUP_INSTANCE-148467FDC40B7504" + ], + "runsOn": [ + "PROCESS_GROUP-A6C0C543A3B775E3" + ] + }, + "toRelationships": {}, + "agentTechnologyType": "JAVA", + "serviceTechnologyTypes": [ + "Java", + "NETTY" + ], + "serviceType": "WebRequest", + "softwareTechnologies": [ + { + "type": "NETTY", + "edition": null, + "version": "4.1.50.Final" + }, + { + "type": "JAVA", + "edition": "Oracle HotSpot", + "version": "13.0.2" + } + ], + "webApplicationId": "Netty on 0:0:0:0:0:0:0:0:*", + "webServerName": "0:0:0:0:0:0:0:0:5555", + "contextRoot": "/", + "port": 5555 + }, + { + "entityId": "SERVICE-B71ADA892013D156", + "displayName": "Requests executed in background threads of Code*Service.exe", + "discoveredName": "Requests executed in background threads of Code*Service.exe", + "firstSeenTimestamp": 1595746270529, + "lastSeenTimestamp": 1596699743160, + "tags": [], + "fromRelationships": { + 
"runsOnProcessGroupInstance": [ + "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" + ], + "runsOn": [ + "PROCESS_GROUP-859E1549052CD876" + ], + "calls": [ + "SERVICE-C12BF59DA3B51679" + ] + }, + "toRelationships": {}, + "agentTechnologyType": "JAVA", + "serviceTechnologyTypes": [ + "Java" + ], + "serviceType": "Process", + "softwareTechnologies": [ + { + "type": "APACHE_HTTP_CLIENT_SYNC", + "edition": null, + "version": "4.5.2" + }, + { + "type": "JETTY", + "edition": null, + "version": "9.4.27.v20200227" + }, + { + "type": "JAVA", + "edition": "OpenJDK", + "version": "11.0.4" + }, + { + "type": "SQLITE", + "edition": null, + "version": null + } + ] + } +] \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/services/get_one.json b/tests/mockserver_payloads/responses/services/get_one.json new file mode 100644 index 0000000..8eb4e76 --- /dev/null +++ b/tests/mockserver_payloads/responses/services/get_one.json @@ -0,0 +1,51 @@ +{ + "entityId": "SERVICE-C12BF59DA3B51679", + "displayName": "/", + "discoveredName": "/", + "firstSeenTimestamp": 1595746255774, + "lastSeenTimestamp": 1596699715160, + "tags": [], + "fromRelationships": { + "runsOnProcessGroupInstance": [ + "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" + ], + "runsOn": [ + "PROCESS_GROUP-859E1549052CD876" + ] + }, + "toRelationships": { + "calls": [ + "SERVICE-B71ADA892013D156" + ] + }, + "agentTechnologyType": "JAVA", + "serviceTechnologyTypes": [ + "Java" + ], + "serviceType": "WebRequest", + "softwareTechnologies": [ + { + "type": "APACHE_HTTP_CLIENT_SYNC", + "edition": null, + "version": "4.5.2" + }, + { + "type": "JETTY", + "edition": null, + "version": "9.4.27.v20200227" + }, + { + "type": "JAVA", + "edition": "OpenJDK", + "version": "11.0.4" + }, + { + "type": "SQLITE", + "edition": null, + "version": null + } + ], + "webApplicationId": "/", + "webServerName": "localhost", + "contextRoot": "/" +} \ No newline at end of file diff --git a/tests/test_services.py b/tests/test_services.py new file mode 100644 index 0000000..068cd1f --- /dev/null +++ b/tests/test_services.py @@ -0,0 +1,89 @@ +"""Test Suite for Topology Services""" + +import unittest +from user_variables import FULL_SET +from tests import tooling_for_test as testtools +from dynatrace.requests.request_handler import TenantAPIs +from dynatrace.tenant.topology import services + +cluster = FULL_SET.get('mock_cluster') +tenant = 'mock_tenant' +url = f"{TenantAPIs.V1_TOPOLOGY}/services" +request_dir = "tests/mockserver_payloads/requests/services" +response_dir = "tests/mockserver_payloads/responses/services" + + +class TestGetServices(unittest.TestCase): + """Test cases for fetching topology services.""" + + def test_get_all_svc(self): + """Test fetching all services""" + response_file = f"{response_dir}/get_all.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = services.get_services_tenantwide(cluster, tenant) + self.assertEqual(result, testtools.expected_payload(response_file)) + + def test_get_single_svc(self): + """Test fetching single service""" + response_file = f"{response_dir}/get_one.json" + svc_id = "SERVICE-C12BF59DA3B51679" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=f"{url}/{svc_id}", + request_type="GET", + response_file=response_file + ) + + result = services.get_service(cluster, tenant, svc_id) + self.assertEqual(result, testtools.expected_payload(response_file)) + + def 
test_get_svc_count(self): + """Test getting the service count tenantwide.""" + response_file = f"{response_dir}/get_all.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = services.get_service_count_tenantwide(cluster, tenant) + self.assertEqual(result, 3) + + +class TestServiceTags(unittest.TestCase): + """Test cases for service tags""" + + def test_add_svc_tags(self): + """Test adding two tags to the service.""" + svc_id = "SERVICE-C12BF59DA3B51679" + request_file = f"{request_dir}/tags.json" + tags = ["demo", "example"] + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + request_type="POST", + url_path=f"{url}/{svc_id}", + request_file=request_file, + response_code=201 + ) + + result = services.add_service_tags(cluster, tenant, svc_id, tags) + self.assertEqual(result, 201) + + +if __name__ == '__main__': + unittest.main() From 815dbb09b8dff98942e3d71a4d670a66456d4f8b Mon Sep 17 00:00:00 2001 From: Radu Date: Thu, 6 Aug 2020 08:55:45 +0100 Subject: [PATCH 48/79] moved tags.json (request_file) to request payloads dir --- .../{responses => requests}/processes/tags.json | 0 tests/test_process_groups.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename tests/mockserver_payloads/{responses => requests}/processes/tags.json (100%) diff --git a/tests/mockserver_payloads/responses/processes/tags.json b/tests/mockserver_payloads/requests/processes/tags.json similarity index 100% rename from tests/mockserver_payloads/responses/processes/tags.json rename to tests/mockserver_payloads/requests/processes/tags.json diff --git a/tests/test_process_groups.py b/tests/test_process_groups.py index 15b17ef..bf78f9d 100644 --- a/tests/test_process_groups.py +++ b/tests/test_process_groups.py @@ -70,7 +70,7 @@ class TestPGTags(unittest.TestCase): def test_add_pg_tags(self): """Test adding two tags to the PG.""" pg_id = "PROCESS_GROUP-859E1549052CD876" - request_file = f"{response_dir}/tags.json" + request_file = f"{request_dir}/tags.json" tags = ["demo", "example"] testtools.create_mockserver_expectation( From fc23279d2f69d31323221a4193b8d67123a2989b Mon Sep 17 00:00:00 2001 From: Radu Date: Thu, 6 Aug 2020 17:38:23 +0100 Subject: [PATCH 49/79] python specific stuff on .gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index aa33aa4..3465291 100644 --- a/.gitignore +++ b/.gitignore @@ -6,8 +6,11 @@ variable_sets/* scripts/* templates/* +# Workspace settings and virtual environments **.vscode** **venv** +# Framework log files +***.log** user_variables.py sandbox_script.py From 4e90f5cba51b8f2756d1db83d99e255de9f46f87 Mon Sep 17 00:00:00 2001 From: Radu Date: Thu, 6 Aug 2020 17:43:45 +0100 Subject: [PATCH 50/79] trimmed down mockserver payloads for hosts. 
edited test_hosts.py to work with changes --- .../responses/hosts/get_all.json | 1375 +---------------- .../responses/hosts/get_single.json | 70 +- tests/test_topology_hosts.py | 8 +- 3 files changed, 10 insertions(+), 1443 deletions(-) diff --git a/tests/mockserver_payloads/responses/hosts/get_all.json b/tests/mockserver_payloads/responses/hosts/get_all.json index ef930c6..bae3691 100644 --- a/tests/mockserver_payloads/responses/hosts/get_all.json +++ b/tests/mockserver_payloads/responses/hosts/get_all.json @@ -1,1381 +1,14 @@ [ { - "entityId": "HOST-9F74450267BAAE20", - "displayName": "host1123.radu.local", - "discoveredName": "host1123.radu.local", - "firstSeenTimestamp": 1594365724435, - "lastSeenTimestamp": 1596559448485, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "Application", - "value": "Super_App" - }, - { - "context": "CONTEXTLESS", - "key": "Environment", - "value": "DEV" - } - ], - "fromRelationships": {}, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-7BDD5DC06168C858", - "PROCESS_GROUP_INSTANCE-726C522DDE0D2524", - "PROCESS_GROUP_INSTANCE-1238166A9681701C", - "PROCESS_GROUP_INSTANCE-829A8915392BFF28", - "PROCESS_GROUP_INSTANCE-6277FD97691C3FCB", - "PROCESS_GROUP_INSTANCE-0A37526FC2730958", - "PROCESS_GROUP_INSTANCE-F86303D27A8E830D", - "PROCESS_GROUP_INSTANCE-7C78AA98F4803D16", - "PROCESS_GROUP_INSTANCE-F9B6343E59930663" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61" - ], - "runsOn": [ - "PROCESS_GROUP-4DE8442CCEAD2251", - "PROCESS_GROUP-6A2955530FA29616", - "PROCESS_GROUP-893E1F101431ADD1", - "PROCESS_GROUP-ACF6C23C63075E6F", - "PROCESS_GROUP-A93DF7C68AF50E3B", - "PROCESS_GROUP-719A10C7F5761D1B", - "PROCESS_GROUP-296B3416E647AAEA", - "PROCESS_GROUP-2F0439DE58E8F50B", - "PROCESS_GROUP-087AEE9E5099E4D2" - ] - }, - "osType": "WINDOWS", - "osArchitecture": "X86", - "osVersion": "Windows Server 2012 Standard, ver. 
6.2.9200", - "hypervisorType": "VMWARE", - "ipAddresses": [ - "10.188.203.46" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 1, - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "consumedHostUnits": 0.25, - "managementZones": [ - { - "id": "4173205628047632907", - "name": "MANAGE_ZONE_DEV" - } - ], - "hostGroup": { - "meId": "HOST_GROUP-FA0C3B0A6B7F4D78", - "name": "DEV_SUPERAPP_ZONE-A" - } + "entityId": "HOST-ABC123DEF456GHIJ", + "consumedHostUnits": 0.25 }, { "entityId": "HOST-5B9CE4E4E14185FA", - "displayName": "eda.aphrodite.dev.eods.uk.eeca", - "discoveredName": "eda.aphrodite.dev.eods.uk.eeca", - "firstSeenTimestamp": 1595930453531, - "lastSeenTimestamp": 1596567537628, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-B667FC231C8B8DD3" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-A21F23DF2377CDF3", - "PROCESS_GROUP_INSTANCE-DDF57230A17C229B", - "PROCESS_GROUP_INSTANCE-8EC7F099B79DE446", - "PROCESS_GROUP_INSTANCE-45B761529113B950", - "PROCESS_GROUP_INSTANCE-DC819D1A1D4BD277", - "PROCESS_GROUP_INSTANCE-6CD25C7F9F3EA7B4", - "PROCESS_GROUP_INSTANCE-94CCA21312F222E9", - "PROCESS_GROUP_INSTANCE-370D49A5BE8CD98F", - "PROCESS_GROUP_INSTANCE-8EA3C7CC429BC058", - "PROCESS_GROUP_INSTANCE-3E9F8129DED1826A", - "PROCESS_GROUP_INSTANCE-48B3556CC2F4EC85", - "PROCESS_GROUP_INSTANCE-387D1C89E2A32B65", - "PROCESS_GROUP_INSTANCE-853875A0D767AC6A", - "PROCESS_GROUP_INSTANCE-B29164D385A8123C" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-B667FC231C8B8DD3" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-77FDC8C1A93F7189", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.161" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.25, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-aphrodite-eda", - "eods-dev-shared-eda" - ], - "awsInstanceType": "t3.medium", - "amiId": "ami-0b196892c728a382c", - "awsInstanceId": "i-0f1a3ac209daf5032", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "DEV_AWS_SHARED" - } + "consumedHostUnits": 0.25 }, { "entityId": "HOST-421D60DB4A2EA929", - "displayName": "eda.ascend.dev.eods.uk.eeca", - "discoveredName": "eda.ascend.dev.eods.uk.eeca", - "firstSeenTimestamp": 1596185232748, - "lastSeenTimestamp": 1596567554260, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - 
"HOST-4399C32AF24910D5", - "HOST-E6234CDBD5DD63DF" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-C474F60F0A130E48", - "PROCESS_GROUP_INSTANCE-0CF25E0DFD16B8EA", - "PROCESS_GROUP_INSTANCE-7553D84034518B67", - "PROCESS_GROUP_INSTANCE-1C3F2EA649460DBB", - "PROCESS_GROUP_INSTANCE-8D4D262CB99D0E3A", - "PROCESS_GROUP_INSTANCE-974674A61CF2C895", - "PROCESS_GROUP_INSTANCE-2E8CCD9A15E3F55C", - "PROCESS_GROUP_INSTANCE-AEB6F70607F20436", - "PROCESS_GROUP_INSTANCE-5132D153699BC056", - "PROCESS_GROUP_INSTANCE-1D3151BD9706D607", - "PROCESS_GROUP_INSTANCE-D0BC8B6AC86E43AB", - "PROCESS_GROUP_INSTANCE-9CB9F19F7C0880B9", - "PROCESS_GROUP_INSTANCE-3016E7EE560C3DEF", - "PROCESS_GROUP_INSTANCE-2C4D0EFE814130C6" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-4399C32AF24910D5", - "HOST-E6234CDBD5DD63DF" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-FAA604E684B7708E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.176" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.25, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-shared-eda", - "eods-dev-ascend-eda" - ], - "awsInstanceType": "t3.medium", - "amiId": "ami-0a41565f4b7a71076", - "awsInstanceId": "i-0227f5311abf531ce", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-413F4F394098A24D", - "displayName": "eda.brazilgds.dev.eods.uk.eeca", - "discoveredName": "eda.brazilgds.dev.eods.uk.eeca", - "firstSeenTimestamp": 1596190372236, - "lastSeenTimestamp": 1596567533507, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-DEDC11439C4F6377" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-03A1FD721E7792EA", - "PROCESS_GROUP_INSTANCE-2DAEE2781F55FE38", - "PROCESS_GROUP_INSTANCE-47B3054A7FF34EAD", - "PROCESS_GROUP_INSTANCE-0665A89C4C1C88A3", - "PROCESS_GROUP_INSTANCE-8E6F09CEB32B055E", - "PROCESS_GROUP_INSTANCE-91B8F7693E08AEF7", - "PROCESS_GROUP_INSTANCE-C22D72ACEE844255", - "PROCESS_GROUP_INSTANCE-C756D9ED00A5052C", - "PROCESS_GROUP_INSTANCE-94645B441644C3F1", - "PROCESS_GROUP_INSTANCE-7671F7A23EE78003", - "PROCESS_GROUP_INSTANCE-9F9BDE7D76BE8BDD", - "PROCESS_GROUP_INSTANCE-9237C3DDF06510D7", - "PROCESS_GROUP_INSTANCE-1B20974DE5BF0869", - "PROCESS_GROUP_INSTANCE-AB2626A4FA4B4C76", - "PROCESS_GROUP_INSTANCE-5210FEB1632DCB32" - ], - 
"isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-DEDC11439C4F6377" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-77FDC8C1A93F7189", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-FAA604E684B7708E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.190" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.25, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-brazilgds-eda", - "eods-dev-shared-eda" - ], - "awsInstanceType": "t3.medium", - "amiId": "ami-0a41565f4b7a71076", - "awsInstanceId": "i-04050a5a2bfc20c77", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-E6234CDBD5DD63DF", - "displayName": "eda.exegol.dev.eods.uk.eeca", - "discoveredName": "eda.exegol.dev.eods.uk.eeca", - "firstSeenTimestamp": 1596524643794, - "lastSeenTimestamp": 1596567543866, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-421D60DB4A2EA929" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-0B5B8CE18D930808", - "PROCESS_GROUP_INSTANCE-29730A2C266EC4CC", - "PROCESS_GROUP_INSTANCE-56F828839C899057", - "PROCESS_GROUP_INSTANCE-9AD062F556E05603", - "PROCESS_GROUP_INSTANCE-EFC07978167CBF23", - "PROCESS_GROUP_INSTANCE-8AB2E19A8A103FAA", - "PROCESS_GROUP_INSTANCE-3887DD9FE3FB4A4F", - "PROCESS_GROUP_INSTANCE-F50CFD53F6680AA0", - "PROCESS_GROUP_INSTANCE-604ADA0F95E0C4BE", - "PROCESS_GROUP_INSTANCE-9865AA2CFE0674F5", - "PROCESS_GROUP_INSTANCE-2C03688E54CBDC5B", - "PROCESS_GROUP_INSTANCE-337858A683010263", - "PROCESS_GROUP_INSTANCE-7635A326839A45D4", - "PROCESS_GROUP_INSTANCE-D16DF440ABA24191" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-421D60DB4A2EA929" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-FAA604E684B7708E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 
3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.177" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.5, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-shared-eda", - "eods-dev-exegol-eda" - ], - "awsInstanceType": "t3.large", - "amiId": "ami-00446e862ac45b149", - "awsInstanceId": "i-0c9b5315f41124dbd", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-DEDC11439C4F6377", - "displayName": "eda.exegol.dev.eods.uk.eeca", - "discoveredName": "eda.exegol.dev.eods.uk.eeca", - "firstSeenTimestamp": 1596242536965, - "lastSeenTimestamp": 1596523700852, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-413F4F394098A24D" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-0B87053ECA9302CB", - "PROCESS_GROUP_INSTANCE-E992A9D8E2304139", - "PROCESS_GROUP_INSTANCE-70714018883520DF", - "PROCESS_GROUP_INSTANCE-5133C5933F2890B3", - "PROCESS_GROUP_INSTANCE-ADCF32A8AA21D404", - "PROCESS_GROUP_INSTANCE-58B58797DC72C416", - "PROCESS_GROUP_INSTANCE-CDF3A0CBBFFA0A08", - "PROCESS_GROUP_INSTANCE-00788007AA694AE7", - "PROCESS_GROUP_INSTANCE-B24DBC02C3823F02", - "PROCESS_GROUP_INSTANCE-7D8F35669570D4E8", - "PROCESS_GROUP_INSTANCE-58930C0AA7201822", - "PROCESS_GROUP_INSTANCE-6D954DBD8B599781", - "PROCESS_GROUP_INSTANCE-B29453B6005AB7E5", - "PROCESS_GROUP_INSTANCE-30904F76A54966F0", - "PROCESS_GROUP_INSTANCE-439E2D9CE96C770C", - "PROCESS_GROUP_INSTANCE-118C57B46FFCC464" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-413F4F394098A24D" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-77FDC8C1A93F7189", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-B62F90EC2186CCF3", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-B1C069EABEFCD191", - "PROCESS_GROUP-FAA604E684B7708E" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.191" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "consumedHostUnits": 0.5, - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-shared-eda", - "eods-dev-exegol-eda" - ], - "awsInstanceType": "t3.large", - "amiId": "ami-03f1877fb9d2f6094", - "awsInstanceId": "i-0749617d1ef4342bc", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, 
- { - "entityId": "HOST-B667FC231C8B8DD3", - "displayName": "eda.gaia.dev.eods.uk.eeca", - "discoveredName": "eda.gaia.dev.eods.uk.eeca", - "firstSeenTimestamp": 1595843874728, - "lastSeenTimestamp": 1596439377370, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-5B9CE4E4E14185FA" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-7364F89D49309AE0", - "PROCESS_GROUP_INSTANCE-68C36D672AADA443", - "PROCESS_GROUP_INSTANCE-D1E45549D386DC67", - "PROCESS_GROUP_INSTANCE-A5484DAB3F3EE4AC", - "PROCESS_GROUP_INSTANCE-40E7F7CF6A6919F1", - "PROCESS_GROUP_INSTANCE-DAF651624346D1A6", - "PROCESS_GROUP_INSTANCE-40E382F35D47986B", - "PROCESS_GROUP_INSTANCE-F13DF7666B32BF1A", - "PROCESS_GROUP_INSTANCE-37F8B8E865F00148", - "PROCESS_GROUP_INSTANCE-2D3E7F97AF294DBA", - "PROCESS_GROUP_INSTANCE-300E6AF75CB62AB2" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-5B9CE4E4E14185FA" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-77FDC8C1A93F7189", - "PROCESS_GROUP-B38E65573551425E", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.160" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "consumedHostUnits": 0.25, - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-gaia-eda", - "eods-dev-shared-eda" - ], - "awsInstanceType": "t3.medium", - "amiId": "ami-02760d44609a734b9", - "awsInstanceId": "i-09ce4aa19402e2fca", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-7A728806A05336AC", - "displayName": "eda.gaia.dev.eods.uk.eeca", - "discoveredName": "eda.gaia.dev.eods.uk.eeca", - "firstSeenTimestamp": 1596440075964, - "lastSeenTimestamp": 1596567334915, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-6D6EAA89DCBA960A" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-16E32547FF9E6AD9", - "PROCESS_GROUP_INSTANCE-695D398E83E65FD3", - "PROCESS_GROUP_INSTANCE-FC1B1ED2E06E91CD", - "PROCESS_GROUP_INSTANCE-666F8EDCBDFEA602", - "PROCESS_GROUP_INSTANCE-86A66804E2E55934", - "PROCESS_GROUP_INSTANCE-AF299C7BF68F5710", - "PROCESS_GROUP_INSTANCE-4D3C309DDE2C14E2", - "PROCESS_GROUP_INSTANCE-B522CEF153E091BF", - "PROCESS_GROUP_INSTANCE-76FF566A1900C5C4", - "PROCESS_GROUP_INSTANCE-0957891719F167E5", - "PROCESS_GROUP_INSTANCE-0DABC17A11907B18", - "PROCESS_GROUP_INSTANCE-4B13F843F126982C", - "PROCESS_GROUP_INSTANCE-A4D6194296751F3C" - ], - "isSiteOf": [ - 
"GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-6D6EAA89DCBA960A" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-B38E65573551425E", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.252" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.25, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-gaia-eda", - "eods-dev-shared-eda" - ], - "awsInstanceType": "t3.medium", - "amiId": "ami-03f1877fb9d2f6094", - "awsInstanceId": "i-0955c2c29e3157ccf", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-EDE2A188CB4C77B1", - "displayName": "eda.hestia.dev.eods.uk.eeca", - "discoveredName": "eda.hestia.dev.eods.uk.eeca", - "firstSeenTimestamp": 1595425521418, - "lastSeenTimestamp": 1596567386740, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-752E1FB58ABB01A6" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-334630CCFD6A5E21", - "PROCESS_GROUP_INSTANCE-250CE79C1D6F4E7B", - "PROCESS_GROUP_INSTANCE-81D8D4FC4769FFA7", - "PROCESS_GROUP_INSTANCE-6B8B375C8B71D0D0", - "PROCESS_GROUP_INSTANCE-873DAF8BE2257760", - "PROCESS_GROUP_INSTANCE-C0D9A551713FE025", - "PROCESS_GROUP_INSTANCE-96B5396E1B0D626F", - "PROCESS_GROUP_INSTANCE-FECD1000E8F91ECE", - "PROCESS_GROUP_INSTANCE-8429E0656F083B99", - "PROCESS_GROUP_INSTANCE-FD651A51F83C50B9", - "PROCESS_GROUP_INSTANCE-81730CC994812BC4", - "PROCESS_GROUP_INSTANCE-663A8FCC78ED3826" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1115D1BB3693C8B8", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-7184C606962EE406", - "PROCESS_GROUP-77FDC8C1A93F7189", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.140" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - 
"networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.25, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-hestia-eda", - "eods-dev-shared-eda" - ], - "awsInstanceType": "t3.medium", - "amiId": "ami-048e11ed1aa982d34", - "awsInstanceId": "i-0c527af83fb5e5964", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-350D8009776A920D", - "displayName": "eda.indiagds.dev.eods.uk.eeca", - "discoveredName": "eda.indiagds.dev.eods.uk.eeca", - "firstSeenTimestamp": 1593783034963, - "lastSeenTimestamp": 1596567529326, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": {}, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-9A5CE817E2B4D9FE", - "PROCESS_GROUP_INSTANCE-C2F55AA979C65E18", - "PROCESS_GROUP_INSTANCE-B36416DD3757356C", - "PROCESS_GROUP_INSTANCE-E056947421B6F3B1", - "PROCESS_GROUP_INSTANCE-4BC7AB263964692E", - "PROCESS_GROUP_INSTANCE-024338920915B043", - "PROCESS_GROUP_INSTANCE-EBA9114D414CBB9D", - "PROCESS_GROUP_INSTANCE-DAB4E357847D9514", - "PROCESS_GROUP_INSTANCE-243D27A71401EA5F", - "PROCESS_GROUP_INSTANCE-84DEAA5E46984F15", - "PROCESS_GROUP_INSTANCE-2622318154DFFB72", - "PROCESS_GROUP_INSTANCE-599C2D4828A7CE78", - "PROCESS_GROUP_INSTANCE-E042FBEFB962ED19", - "PROCESS_GROUP_INSTANCE-FA5DC6FE84D9351E", - "PROCESS_GROUP_INSTANCE-E3C89AF1198FA66D" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-77FDC8C1A93F7189", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-FAA604E684B7708E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.136" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.5, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-shared-eda", - "eods-dev-indiagds-eda" - ], - "awsInstanceType": "t3.large", - "amiId": "ami-0ce88eb2646dd6eac", - "awsInstanceId": "i-0069235aa24092781", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-6D6EAA89DCBA960A", - "displayName": "eda.jeds.dev.eods.uk.eeca", - "discoveredName": "eda.jeds.dev.eods.uk.eeca", - "firstSeenTimestamp": 1596098871283, - "lastSeenTimestamp": 1596567349283, - "tags": [ - { - "context": 
"CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-7A728806A05336AC" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-B3CA3BCDEA9CBF9A", - "PROCESS_GROUP_INSTANCE-BB5992E1761F5789", - "PROCESS_GROUP_INSTANCE-3036CA29518BDD50", - "PROCESS_GROUP_INSTANCE-14F58D2B2E92D7C9", - "PROCESS_GROUP_INSTANCE-77F967D5FCA1F315", - "PROCESS_GROUP_INSTANCE-24E4D4127FFD8646", - "PROCESS_GROUP_INSTANCE-155BEFD4FB7C9C59", - "PROCESS_GROUP_INSTANCE-5A201212A2C5B444", - "PROCESS_GROUP_INSTANCE-A23EEC7E2F093119", - "PROCESS_GROUP_INSTANCE-B835BEF48A66F7B6", - "PROCESS_GROUP_INSTANCE-EB073C5D9C87316B", - "PROCESS_GROUP_INSTANCE-041C0B46FE2D4AAC", - "PROCESS_GROUP_INSTANCE-7E411B01FF0FFF75", - "PROCESS_GROUP_INSTANCE-01FF07C88377CA7F" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-7A728806A05336AC" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-77FDC8C1A93F7189", - "PROCESS_GROUP-B38E65573551425E", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.250" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.25, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-jeds-eda", - "eods-dev-shared-eda" - ], - "awsInstanceType": "t3.medium", - "amiId": "ami-02f52db0f39b16e7d", - "awsInstanceId": "i-0a534fbb876e94607", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-405742DD38D0564F", - "displayName": "eda.morpheus.dev.eods.uk.eeca", - "discoveredName": "eda.morpheus.dev.eods.uk.eeca", - "firstSeenTimestamp": 1596005374982, - "lastSeenTimestamp": 1596567516902, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-4399C32AF24910D5" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-9EF3D3990EF67FDF", - "PROCESS_GROUP_INSTANCE-1A51DEAECD6EBA9A", - "PROCESS_GROUP_INSTANCE-EB1BBCAD2F72C245", - "PROCESS_GROUP_INSTANCE-12FB702F43DAC1BF", - "PROCESS_GROUP_INSTANCE-7719FA4646AF7401", - "PROCESS_GROUP_INSTANCE-8F07042ACB63F15C", - "PROCESS_GROUP_INSTANCE-2140323046FA9179", - "PROCESS_GROUP_INSTANCE-950C56A06E0C37F3", - "PROCESS_GROUP_INSTANCE-8B29FEDB21622A61", - "PROCESS_GROUP_INSTANCE-C63ED40978EDF12E", - "PROCESS_GROUP_INSTANCE-3B3A6D4BBA718854", - 
"PROCESS_GROUP_INSTANCE-F60BA59802FE681B", - "PROCESS_GROUP_INSTANCE-5378F3551B653F30", - "PROCESS_GROUP_INSTANCE-2CC6EF9C671D0A3A" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-4399C32AF24910D5" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-B38E65573551425E", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-FAA604E684B7708E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.174" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.25, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-morpheus-eda", - "eods-dev-shared-eda" - ], - "awsInstanceType": "t3.medium", - "amiId": "ami-02f52db0f39b16e7d", - "awsInstanceId": "i-0122160aacbb13ba3", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-4399C32AF24910D5", - "displayName": "eda.sofiaqa.dev.eods.uk.eeca", - "discoveredName": "eda.sofiaqa.dev.eods.uk.eeca", - "firstSeenTimestamp": 1595425419743, - "lastSeenTimestamp": 1596567525176, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": { - "isNetworkClientOfHost": [ - "HOST-421D60DB4A2EA929", - "HOST-405742DD38D0564F" - ] - }, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-E71904BAC6A65EA5", - "PROCESS_GROUP_INSTANCE-2F086E6BAD844CA0", - "PROCESS_GROUP_INSTANCE-A99B934F3FA051F7", - "PROCESS_GROUP_INSTANCE-67B034846C4CEA75", - "PROCESS_GROUP_INSTANCE-C5F055FEB274B7B4", - "PROCESS_GROUP_INSTANCE-50B672A2D1FC79AA", - "PROCESS_GROUP_INSTANCE-F4292764D018FC1B", - "PROCESS_GROUP_INSTANCE-33600EA8F97CB445", - "PROCESS_GROUP_INSTANCE-9D3D526EC46F3945", - "PROCESS_GROUP_INSTANCE-7315D06DB535C9B3", - "PROCESS_GROUP_INSTANCE-3C170C0FB469B00B" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-421D60DB4A2EA929", - "HOST-405742DD38D0564F" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-77FDC8C1A93F7189", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-B1C069EABEFCD191" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 
3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.175" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.25, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - "awsSecurityGroup": [ - "eods-dev-shared-eda", - "eods-dev-sofiaqa-eda" - ], - "awsInstanceType": "t3.medium", - "amiId": "ami-048e11ed1aa982d34", - "awsInstanceId": "i-01158fb516cc1882b", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } - }, - { - "entityId": "HOST-752E1FB58ABB01A6", - "displayName": "eda.testkibana.dev.eods.uk.eeca", - "discoveredName": "eda.testkibana.dev.eods.uk.eeca", - "firstSeenTimestamp": 1592896258226, - "lastSeenTimestamp": 1596567568688, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "OS", - "value": "Linux" - }, - { - "context": "CONTEXTLESS", - "key": "Alerting_Queue", - "value": "SUPPORT" - } - ], - "fromRelationships": {}, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-58151B6C30C89632", - "PROCESS_GROUP_INSTANCE-4260A72EF4C423E8", - "PROCESS_GROUP_INSTANCE-BA7E59427908A6B5", - "PROCESS_GROUP_INSTANCE-A0750BC8DC67601A", - "PROCESS_GROUP_INSTANCE-7C1E2A24C93009CA", - "PROCESS_GROUP_INSTANCE-F3478961CA86A6C7", - "PROCESS_GROUP_INSTANCE-94CEA4B98C81917F", - "PROCESS_GROUP_INSTANCE-C8450A437F254F0F", - "PROCESS_GROUP_INSTANCE-CC5BAC0D0013D2CA", - "PROCESS_GROUP_INSTANCE-19BFB2F4D5765DD3", - "PROCESS_GROUP_INSTANCE-5827CBC26581E935", - "PROCESS_GROUP_INSTANCE-AB8A8EF1BC9D2836", - "PROCESS_GROUP_INSTANCE-A105402BE0932764", - "PROCESS_GROUP_INSTANCE-9C58C0F66EFB3E6C", - "PROCESS_GROUP_INSTANCE-B677ABDA0185EBF3", - "PROCESS_GROUP_INSTANCE-D42BEE0DAAF2A5AE", - "PROCESS_GROUP_INSTANCE-32392E4FC97880EF", - "PROCESS_GROUP_INSTANCE-7EABFD171B7DB61D" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61", - "AWS_AVAILABILITY_ZONE-4FA4A4BB20C71342" - ], - "isNetworkClientOfHost": [ - "HOST-EDE2A188CB4C77B1" - ], - "runsOn": [ - "PROCESS_GROUP-F612B535AB74A08A", - "PROCESS_GROUP-0765BFB1555AEC64", - "PROCESS_GROUP-5D3213D3CEE71BFC", - "PROCESS_GROUP-7BE59F151DF1E05F", - "PROCESS_GROUP-5C7D243AE1CD33CA", - "PROCESS_GROUP-79AF9C7B3182E51B", - "PROCESS_GROUP-1115D1BB3693C8B8", - "PROCESS_GROUP-1D024AE6C849FC39", - "PROCESS_GROUP-28699B8D57AACB48", - "PROCESS_GROUP-D55B147D56DC61BC", - "PROCESS_GROUP-8F167A901E185CA4", - "PROCESS_GROUP-57D66C1273F88C3D", - "PROCESS_GROUP-374EE57952100044", - "PROCESS_GROUP-82AC69032E524B5E", - "PROCESS_GROUP-A105F1B82049A408", - "PROCESS_GROUP-3C74CEE5747023B4", - "PROCESS_GROUP-E53DDEE9533F67FF" - ] - }, - "osType": "LINUX", - "osArchitecture": "X86", - "osVersion": "Red Hat Enterprise Linux Server 7.6 (Maipo) (kernel 3.10.0-1062.12.1.el7.x86_64)", - "hypervisorType": "KVM", - "ipAddresses": [ - "10.226.144.147" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 2, - "cloudType": "EC2", - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "agentVersion": { - "major": 1, - "minor": 185, - "revision": 137, - "timestamp": "20200212-183600", - "sourceRevision": "" - }, - "consumedHostUnits": 0.5, - "userLevel": "SUPERUSER", - "managementZones": [ - { - "id": "5557749485955049446", - "name": "AWS_EODS_SHARED" - } - ], - 
"awsSecurityGroup": [ - "eods-dev-shared-eda", - "eods-dev-testkibana-eda" - ], - "awsInstanceType": "m5.large", - "amiId": "ami-0e1f7043cd799a9fc", - "awsInstanceId": "i-0588f65cba4550089", - "hostGroup": { - "meId": "HOST_GROUP-8692976EA85437E7", - "name": "AWS_EODS_SHARED" - } + "consumedHostUnits": 3.5 } ] \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/hosts/get_single.json b/tests/mockserver_payloads/responses/hosts/get_single.json index dcda31d..b27e2e7 100644 --- a/tests/mockserver_payloads/responses/hosts/get_single.json +++ b/tests/mockserver_payloads/responses/hosts/get_single.json @@ -1,70 +1,4 @@ { - "entityId": "HOST-9F74450267BAAE20", - "displayName": "host1123.radu.local", - "discoveredName": "host1123.radu.local", - "firstSeenTimestamp": 1594365724435, - "lastSeenTimestamp": 1596559448485, - "tags": [ - { - "context": "CONTEXTLESS", - "key": "Application", - "value": "Super_App" - }, - { - "context": "CONTEXTLESS", - "key": "Environment", - "value": "DEV" - } - ], - "fromRelationships": {}, - "toRelationships": { - "isProcessOf": [ - "PROCESS_GROUP_INSTANCE-7BDD5DC06168C858", - "PROCESS_GROUP_INSTANCE-726C522DDE0D2524", - "PROCESS_GROUP_INSTANCE-1238166A9681701C", - "PROCESS_GROUP_INSTANCE-829A8915392BFF28", - "PROCESS_GROUP_INSTANCE-6277FD97691C3FCB", - "PROCESS_GROUP_INSTANCE-0A37526FC2730958", - "PROCESS_GROUP_INSTANCE-F86303D27A8E830D", - "PROCESS_GROUP_INSTANCE-7C78AA98F4803D16", - "PROCESS_GROUP_INSTANCE-F9B6343E59930663" - ], - "isSiteOf": [ - "GEOLOC_SITE-7580E62C914CAE61" - ], - "runsOn": [ - "PROCESS_GROUP-4DE8442CCEAD2251", - "PROCESS_GROUP-6A2955530FA29616", - "PROCESS_GROUP-893E1F101431ADD1", - "PROCESS_GROUP-ACF6C23C63075E6F", - "PROCESS_GROUP-A93DF7C68AF50E3B", - "PROCESS_GROUP-719A10C7F5761D1B", - "PROCESS_GROUP-296B3416E647AAEA", - "PROCESS_GROUP-2F0439DE58E8F50B", - "PROCESS_GROUP-087AEE9E5099E4D2" - ] - }, - "osType": "WINDOWS", - "osArchitecture": "X86", - "osVersion": "Windows Server 2012 Standard, ver. 
6.2.9200", - "hypervisorType": "VMWARE", - "ipAddresses": [ - "10.188.203.46" - ], - "bitness": "64bit", - "cpuCores": 1, - "logicalCpuCores": 1, - "monitoringMode": "FULL_STACK", - "networkZoneId": "default", - "consumedHostUnits": 0.25, - "managementZones": [ - { - "id": "4173205628047632907", - "name": "MANAGE_ZONE_DEV" - } - ], - "hostGroup": { - "meId": "HOST_GROUP-FA0C3B0A6B7F4D78", - "name": "DEV_SUPERAPP_ZONE-A" - } + "entityId": "HOST-ABC123DEF456GHIJ", + "consumedHostUnits": 0.25 } \ No newline at end of file diff --git a/tests/test_topology_hosts.py b/tests/test_topology_hosts.py index 540f108..5aef7c8 100644 --- a/tests/test_topology_hosts.py +++ b/tests/test_topology_hosts.py @@ -65,7 +65,7 @@ def test_get_host_count(self): ) result = hosts.get_host_count_tenantwide(cluster, tenant) - self.assertEqual(result, 14) + self.assertEqual(result, 3) def test_get_host_units(self): """Tests getting the consumed host units in a tenant.""" @@ -80,7 +80,7 @@ def test_get_host_units(self): ) result = hosts.get_host_units_tenantwide(cluster, tenant) - self.assertEqual(result, 4.5) + self.assertEqual(result, 4) hosts.set_host_properties @@ -91,7 +91,7 @@ class TestHostTagging(unittest.TestCase): def test_add_tags(self): """Test adding two tags to a specific host.""" - host_id = "HOST-9F74450267BAAE20" + host_id = "HOST-ABC123DEF456GHIJ" request_file = f"{request_dir}/tags.json" tags = ["demo", "example"] @@ -110,7 +110,7 @@ def test_add_tags(self): def test_delete_tags(self): """Test deleting a tag from a specific host.""" - host_id = "HOST-9F74450267BAAE20" + host_id = "HOST-ABC123DEF456GHIJ" tag = "demo" testtools.create_mockserver_expectation( From 20fa3f855bf1f0a601c42d49d4e71bab766b2cc5 Mon Sep 17 00:00:00 2001 From: Radu Date: Thu, 6 Aug 2020 17:45:36 +0100 Subject: [PATCH 51/79] edited vars import to work with circleci mockserver vars --- tests/test_process_groups.py | 91 ++++++++++++++++++++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100644 tests/test_process_groups.py diff --git a/tests/test_process_groups.py b/tests/test_process_groups.py new file mode 100644 index 0000000..34836d4 --- /dev/null +++ b/tests/test_process_groups.py @@ -0,0 +1,91 @@ +"""Test Suite for Topology Process Groups""" + +import unittest +from user_variables import FULL_SET +from tests import tooling_for_test as testtools +from dynatrace.requests.request_handler import TenantAPIs +from dynatrace.tenant.topology import process_groups + +cluster = FULL_SET.get('mockserver1') +tenant = 'tenant1' +url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/process-groups" +request_dir = "tests/mockserver_payloads/requests/processes" +response_dir = "tests/mockserver_payloads/responses/processes" + + +class TestGetPGs(unittest.TestCase): + """Test cases for fetching topology process groups.""" + + def test_get_all_pgs(self): + """Test fetching all PGs""" + response_file = f"{response_dir}/get_all_pgs.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = process_groups.get_process_groups_tenantwide(cluster, tenant) + self.assertEqual(result, testtools.expected_payload(response_file)) + + def test_get_single_pg(self): + """Test fetching single PG""" + response_file = f"{response_dir}/get_one_pg.json" + pg_id = "PROCESS_GROUP-859E1549052CD876" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=f"{url}/{pg_id}", + request_type="GET", + 
response_file=response_file + ) + + result = process_groups.get_process_group(cluster, tenant, pg_id) + self.assertEqual(result, testtools.expected_payload(response_file)) + + def test_get_pg_count(self): + """Test getting the PG count tenantwide.""" + response_file = f"{response_dir}/get_all_pgs.json" + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + url_path=url, + request_type="GET", + response_file=response_file + ) + + result = process_groups.get_process_group_count_tenantwide(cluster, + tenant) + self.assertEqual(result, 4) + + +class TestPGTags(unittest.TestCase): + """Test cases for PG tags""" + + def test_add_pg_tags(self): + """Test adding two tags to the PG.""" + pg_id = "PROCESS_GROUP-859E1549052CD876" + request_file = f"{request_dir}/tags.json" + tags = ["demo", "example"] + + testtools.create_mockserver_expectation( + cluster=cluster, + tenant=tenant, + request_type="POST", + url_path=f"{url}/{pg_id}", + request_file=request_file, + response_code=201 + ) + + result = process_groups.add_process_group_tags(cluster, tenant, + pg_id, tags) + self.assertEqual(result, 201) + + +if __name__ == '__main__': + unittest.main() From 6329ce5de3d33e8d3a59ad2e98106998007a5494 Mon Sep 17 00:00:00 2001 From: Aaron Date: Thu, 6 Aug 2020 22:38:50 -0500 Subject: [PATCH 52/79] PAF-21 # New Tests, Consolidate Response to 1 now --- .../mock_create_daily_multi_tags_and_1.json | 28 +++++++++++++++++ .../mock_create_daily_multi_tags_or_1.json | 30 +++++++++++++++++++ .../mock_create_daily_single_tag_1.json | 2 +- .../mock_create_daily_single_tag_1.json | 5 ---- 4 files changed, 59 insertions(+), 6 deletions(-) create mode 100644 tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_and_1.json create mode 100644 tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_or_1.json delete mode 100644 tests/mockserver_payloads/responses/maintenance/mock_create_daily_single_tag_1.json diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_and_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_and_1.json new file mode 100644 index 0000000..c15fe1f --- /dev/null +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_and_1.json @@ -0,0 +1,28 @@ +{ + "name": "Test Payload Daily", + "description": "Generating Payload for Test", + "suppression": "DETECT_PROBLEMS_AND_ALERT", + "schedule": { + "recurrenceType": "DAILY", + "start": "2020-01-01 00:00", + "end": "2020-01-02 00:00", + "zoneId": "America/Chicago", + "recurrence": { + "startTime": "23:00", + "durationMinutes": 60 + } + }, + "type": "PLANNED", + "scope": { + "entities": [], + "matches": [{ + "tags": [{ + "context": "CONTEXTLESS", + "key": "testing" + }, { + "context": "CONTEXTLESS", + "key": "testing2" + }] + }] + } +} \ No newline at end of file diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_or_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_or_1.json new file mode 100644 index 0000000..88f59e8 --- /dev/null +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_or_1.json @@ -0,0 +1,30 @@ +{ + "name": "Test Payload Daily", + "description": "Generating Payload for Test", + "suppression": "DETECT_PROBLEMS_AND_ALERT", + "schedule": { + "recurrenceType": "DAILY", + "start": "2020-01-01 00:00", + "end": "2020-01-02 00:00", + "zoneId": "America/Chicago", + "recurrence": { + "startTime": 
"23:00", + "durationMinutes": 60 + } + }, + "type": "PLANNED", + "scope": { + "entities": [], + "matches": [{ + "tags": [{ + "context": "CONTEXTLESS", + "key": "testing" + }] + }, { + "tags": [{ + "context": "CONTEXTLESS", + "key": "testing2" + }] + }] + } +} \ No newline at end of file diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json index a309fb1..9a8bd8b 100644 --- a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json @@ -1,5 +1,5 @@ { - "name": "Test Payload Daily with Tag", + "name": "Test Payload Daily", "description": "Generating Payload for Test", "suppression": "DETECT_PROBLEMS_AND_ALERT", "schedule": { diff --git a/tests/mockserver_payloads/responses/maintenance/mock_create_daily_single_tag_1.json b/tests/mockserver_payloads/responses/maintenance/mock_create_daily_single_tag_1.json deleted file mode 100644 index 16ba0d1..0000000 --- a/tests/mockserver_payloads/responses/maintenance/mock_create_daily_single_tag_1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "id": "f8d5614d-7407-4fdf-a6a1-1e0ed693a6cf", - "name": "Test Payload Daily with Tag", - "description": "Generating Payload for Test" -} \ No newline at end of file From fdef6c67b2aebf990b1d4d851669a433e94ccf19 Mon Sep 17 00:00:00 2001 From: Aaron Date: Thu, 6 Aug 2020 22:40:47 -0500 Subject: [PATCH 53/79] PAF-21 #Adding ANDs and ORs logic to maintenance --- dynatrace/tenant/maintenance.py | 77 +++++++++++++++++++++++++-------- 1 file changed, 59 insertions(+), 18 deletions(-) diff --git a/dynatrace/tenant/maintenance.py b/dynatrace/tenant/maintenance.py index 5356161..7213f01 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -11,7 +11,6 @@ class Suppression(Enum): """ - *** NOT ACTIVE YET*** Types of suppression for create Maintenance Window JSON. 
Suppression is required Args: @@ -23,9 +22,15 @@ class Suppression(Enum): DISABLE_ALERTING = "DETECT_PROBLEMS_DONT_ALERT" DISABLE_DETECTION = "DONT_DETECT_PROBLEMS" + def __str__(self): + return self.value + + def __repr__(self): + return self.value + + class Day(Enum): """ - *** NOT ACTIVE YET *** Day of the Week Args: @@ -38,13 +43,19 @@ class Day(Enum): Enum (SUNDAY): SUNDAY """ - MONDAY = "MONDAY" - TUESDAY = "TUESDAY" - WEDNESDAY = "WEDNESDAY" - THURSDAY = "THURSDAY" - FRIDAY = "FRIDAY" - SATURDAY = "SATURDAY" - SUNDAY = "SUNDAY" + MONDAY = auto() + TUESDAY = auto() + WEDNESDAY = auto() + THURSDAY = auto() + FRIDAY = auto() + SATURDAY = auto() + SUNDAY = auto() + + def __str__(self): + return self.name + + def __repr__(self): + return self.name def validate_datetime(datetime_text, required_format): try: @@ -52,20 +63,50 @@ def validate_datetime(datetime_text, required_format): except ValueError: raise InvalidDateFormatException(required_format) +def generate_tag_scope(tag, filter_type=None, management_zone_id=None): + tag_payload = {} + + if management_zone_id: + tag_payload ['managementZoneId'] = str(management_zone_id) + + if isinstance (tag, list) and len(tag) > 0: + tag_payload ['tags'] = tag + elif isinstance (tag, dict): + tag_payload ['tags'] = [tag] + elif isinstance (tag, str): + tag_payload ['tags'] = [{'context': "CONTEXTLESS",'key': tag}] + + return tag_payload -def generate_scope(entities=None, filter_type=None, management_zone_id=None, tags=None, matches_any_tag=False): +def generate_scope(entities=None, filter_type=None, management_zone_id=None, tags=None, match_any_tag=True): if entities is None: entities = [] matches = [] matches_payload = {} - if isinstance(filter_type, str): - matches_payload['type'] = filter_type - if management_zone_id: - matches_payload['managementZoneId'] = management_zone_id - if isinstance(tags, list): - matches_payload['tags'] = tags - - matches.append(matches_payload) + # if isinstance(filter_type, str): + # matches_payload['type'] = filter_type + + if match_any_tag and isinstance(tags, list) and len(tags)>1: + for tag in tags: + matches.append( + generate_tag_scope( + tag, + filter_type=filter_type, + management_zone_id=management_zone_id + ) + ) + else: + matches.append( + generate_tag_scope( + tags, + filter_type=filter_type, + management_zone_id=management_zone_id + ) + ) + + # if isinstance(match_any_tag, bool): + # matches_payload['tagsCombination'] = "OR" if match_any_tag \ + # else "AND" scope = { 'entities': entities, From 4470cd61299590b80abb9019ace375ae9b5363f4 Mon Sep 17 00:00:00 2001 From: Aaron Date: Thu, 6 Aug 2020 22:45:39 -0500 Subject: [PATCH 54/79] PAF-21 #Adding Multi-Tag Tests --- tests/test_maintenance_windows.py | 96 ++++++++++++++++++++++++++----- 1 file changed, 81 insertions(+), 15 deletions(-) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index 658656a..bc95ab6 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -1,6 +1,4 @@ -""" -Test Cases For Maintenance Windows. 
-""" +"""Test Cases For Maintenance Windows.""" import unittest import user_variables from tests import tooling_for_test @@ -11,14 +9,8 @@ TENANT = "tenant1" URL_PATH = TenantAPIs.MAINTENANCE_WINDOWS - class TestMaintenanceWindowCreate(unittest.TestCase): - """ - Test Cases for Creating a Maintenance Window - - Args: - unittest ([type]): [description] - """ + """Test Cases for Creating a Maintenance Window""" REQUEST_DIR = "tests/mockserver_payloads/requests/maintenance/" RESPONSE_DIR = "tests/mockserver_payloads/responses/maintenance/" @@ -54,11 +46,9 @@ def test_create_daily_no_scope(self): self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) def test_create_daily_single_tag(self): - """ - Testing create daily Maintenance Window with a single tag scope - """ + """Testing create daily Maintenance Window with a single tag scope""" mockserver_request_file = f"{self.REQUEST_DIR}mock_create_daily_single_tag_1.json" - mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_daily_single_tag_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_daily_1.json" tooling_for_test.create_mockserver_expectation( CLUSTER, TENANT, @@ -76,7 +66,83 @@ def test_create_daily_single_tag(self): ) maintenance_scope = maintenance.generate_scope(tags=[{'context': "CONTEXTLESS",'key': "testing"}]) maintenance_json = maintenance.generate_window_json( - "Test Payload Daily with Tag", + "Test Payload Daily", + "Generating Payload for Test", + "DETECT_PROBLEMS_AND_ALERT", + maintenance_schedule, + scope= maintenance_scope, + is_planned=True + ) + result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) + self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) + + def test_create_daily_tags_and(self): + """Testing Payloads with multiple tags in an \"AND\" configuration""" + mockserver_request_file = f"{self.REQUEST_DIR}mock_create_daily_multi_tags_and_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_daily_1.json" + + tooling_for_test.create_mockserver_expectation( + CLUSTER, + TENANT, + URL_PATH, + "POST", + request_file=mockserver_request_file, + response_file=mockserver_response_file, + ) + maintenance_schedule = maintenance.generate_schedule( + "DAILY", + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00" + ) + maintenance_scope = maintenance.generate_scope( + tags=[ + {'context': "CONTEXTLESS",'key': "testing"}, + {'context': "CONTEXTLESS",'key': "testing2"} + ], + match_any_tag=False + ) + maintenance_json = maintenance.generate_window_json( + "Test Payload Daily", + "Generating Payload for Test", + "DETECT_PROBLEMS_AND_ALERT", + maintenance_schedule, + scope= maintenance_scope, + is_planned=True + ) + result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) + self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) + + def test_create_daily_tags_or(self): + """Testing Payloads with multiple tags in an \"AND\" configuration""" + mockserver_request_file = f"{self.REQUEST_DIR}mock_create_daily_multi_tags_or_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_daily_1.json" + + tooling_for_test.create_mockserver_expectation( + CLUSTER, + TENANT, + URL_PATH, + "POST", + request_file=mockserver_request_file, + response_file=mockserver_response_file, + ) + maintenance_schedule = maintenance.generate_schedule( + "DAILY", + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00" + ) + maintenance_scope = maintenance.generate_scope( + tags=[ 
+ {'context': "CONTEXTLESS",'key': "testing"}, + {'context': "CONTEXTLESS",'key': "testing2"} + ], + match_any_tag=True + ) + maintenance_json = maintenance.generate_window_json( + "Test Payload Daily", "Generating Payload for Test", "DETECT_PROBLEMS_AND_ALERT", maintenance_schedule, From 4e3a6ab7ff7a0b563864fb543a2b3bb5b33b11aa Mon Sep 17 00:00:00 2001 From: Aaron Date: Thu, 6 Aug 2020 23:25:47 -0500 Subject: [PATCH 55/79] PAF-21 # Added all useful Enums for maintenance --- dynatrace/tenant/maintenance.py | 146 ++++++++++++++++++++++++++++++-- 1 file changed, 138 insertions(+), 8 deletions(-) diff --git a/dynatrace/tenant/maintenance.py b/dynatrace/tenant/maintenance.py index 7213f01..52cbd2d 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -14,19 +14,19 @@ class Suppression(Enum): Types of suppression for create Maintenance Window JSON. Suppression is required Args: - Enum (FULL_ALERTING): Full Alerting. Entites in scope will have notes that a Maintenance Window was active - Enum (DISABLE_ALERTING): Problems detected but alerting profiles in that scope are not triggered - Enum (DISABLE_DETECTION): Problem detection completely off for the scope + Enum (DETECT_PROBLEMS_AND_ALERT): Full Alerting. Entites in scope will have notes that a Maintenance Window was active + Enum (DETECT_PROBLEMS_DONT_ALERT): Problems detected but alerting profiles in that scope are not triggered + Enum (DONT_DETECT_PROBLEMS): Problem detection completely off for the scope """ - FULL_ALERTING = "DETECT_PROBLEMS_AND_ALERT" - DISABLE_ALERTING = "DETECT_PROBLEMS_DONT_ALERT" - DISABLE_DETECTION = "DONT_DETECT_PROBLEMS" + DETECT_PROBLEMS_AND_ALERT = auto() + DETECT_PROBLEMS_DONT_ALERT = auto() + DONT_DETECT_PROBLEMS = auto() def __str__(self): - return self.value + return self.name def __repr__(self): - return self.value + return self.name class Day(Enum): @@ -57,6 +57,136 @@ def __str__(self): def __repr__(self): return self.name +class Context(Enum): + """Tag Contexts that are available""" + AWS = auto() + AWS_GENERIC = auto() + AZURE = auto() + CLOUD_FOUNDRY = auto() + CONTEXTLESS = auto() + ENVIRONMENT = auto() + GOOGLE_CLOUD = auto() + KUBERNETES = auto() + + def __str__(self): + return self.name + + def __repr__(self): + return self.name +class RecurrenceType(Enum): + """Recurrence of the Maintenance Window""" + DAILY = auto() + MONTHLY = auto() + ONCE = auto() + WEEKLY = auto() + + def __str__(self): + return self.name + + def __repr__(self): + return self.name + +class FilterType(Enum): + """All Filter Types available for tag filters""" + APM_SECURITY_GATEWAY = auto() + APPLICATION = auto() + APPLICATION_METHOD = auto() + APPLICATION_METHOD_GROUP = auto() + APPMON_SERVER = auto() + APPMON_SYSTEM_PROFILE = auto() + AUTO_SCALING_GROUP = auto() + AUXILIARY_SYNTHETIC_TEST = auto() + AWS_APPLICATION_LOAD_BALANCER = auto() + AWS_AVAILABILITY_ZONE = auto() + AWS_CREDENTIALS = auto() + AWS_LAMBDA_FUNCTION = auto() + AWS_NETWORK_LOAD_BALANCER = auto() + AZURE_API_MANAGEMENT_SERVICE = auto() + AZURE_APPLICATION_GATEWAY = auto() + AZURE_COSMOS_DB = auto() + AZURE_CREDENTIALS = auto() + AZURE_EVENT_HUB = auto() + AZURE_EVENT_HUB_NAMESPACE = auto() + AZURE_FUNCTION_APP = auto() + AZURE_IOT_HUB = auto() + AZURE_LOAD_BALANCER = auto() + AZURE_MGMT_GROUP = auto() + AZURE_REDIS_CACHE = auto() + AZURE_REGION = auto() + AZURE_SERVICE_BUS_NAMESPACE = auto() + AZURE_SERVICE_BUS_QUEUE = auto() + AZURE_SERVICE_BUS_TOPIC = auto() + AZURE_SQL_DATABASE = auto() + AZURE_SQL_ELASTIC_POOL = auto() + 
AZURE_SQL_SERVER = auto() + AZURE_STORAGE_ACCOUNT = auto() + AZURE_SUBSCRIPTION = auto() + AZURE_TENANT = auto() + AZURE_VM = auto() + AZURE_VM_SCALE_SET = auto() + AZURE_WEB_APP = auto() + CF_APPLICATION = auto() + CF_FOUNDATION = auto() + CINDER_VOLUME = auto() + CLOUD_APPLICATION = auto() + CLOUD_APPLICATION_INSTANCE = auto() + CLOUD_APPLICATION_NAMESPACE = auto() + CONTAINER_GROUP = auto() + CONTAINER_GROUP_INSTANCE = auto() + CUSTOM_APPLICATION = auto() + CUSTOM_DEVICE = auto() + CUSTOM_DEVICE_GROUP = auto() + DCRUM_APPLICATION = auto() + DCRUM_SERVICE = auto() + DCRUM_SERVICE_INSTANCE = auto() + DEVICE_APPLICATION_METHOD = auto() + DISK = auto() + DOCKER_CONTAINER_GROUP = auto() + DOCKER_CONTAINER_GROUP_INSTANCE = auto() + DYNAMO_DB_TABLE = auto() + EBS_VOLUME = auto() + EC2_INSTANCE = auto() + ELASTIC_LOAD_BALANCER = auto() + ENVIRONMENT = auto() + EXTERNAL_SYNTHETIC_TEST_STEP = auto() + GCP_ZONE = auto() + GEOLOCATION = auto() + GEOLOC_SITE = auto() + GOOGLE_COMPUTE_ENGINE = auto() + HOST = auto() + HOST_GROUP = auto() + HTTP_CHECK = auto() + HTTP_CHECK_STEP = auto() + HYPERVISOR = auto() + KUBERNETES_CLUSTER = auto() + KUBERNETES_NODE = auto() + MOBILE_APPLICATION = auto() + NETWORK_INTERFACE = auto() + NEUTRON_SUBNET = auto() + OPENSTACK_PROJECT = auto() + OPENSTACK_REGION = auto() + OPENSTACK_VM = auto() + OS = auto() + PROCESS_GROUP = auto() + PROCESS_GROUP_INSTANCE = auto() + RELATIONAL_DATABASE_SERVICE = auto() + SERVICE = auto() + SERVICE_INSTANCE = auto() + SERVICE_METHOD = auto() + SERVICE_METHOD_GROUP = auto() + SWIFT_CONTAINER = auto() + SYNTHETIC_LOCATION = auto() + SYNTHETIC_TEST = auto() + SYNTHETIC_TEST_STEP = auto() + VIRTUALMACHINE = auto() + VMWARE_DATACENTER = auto() + + def __str__(self): + return self.name + + def __repr__(self): + return self.name + def validate_datetime(datetime_text, required_format): try: datetime.datetime.strptime(datetime_text, required_format) From 46c4304149b46fd6a1f2d8aaebaef6d196026490 Mon Sep 17 00:00:00 2001 From: Aaron Date: Fri, 7 Aug 2020 08:33:28 -0500 Subject: [PATCH 56/79] PAF-21 #Format& maintenance validation to use enum --- dynatrace/tenant/maintenance.py | 69 ++++++++++++++++------------- tests/test_maintenance_windows.py | 72 +++++++++++++++++++++++-------- 2 files changed, 94 insertions(+), 47 deletions(-) diff --git a/dynatrace/tenant/maintenance.py b/dynatrace/tenant/maintenance.py index 52cbd2d..4d88bc0 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -9,6 +9,7 @@ MZ_ENDPOINT = rh.TenantAPIs.MAINTENANCE_WINDOWS + class Suppression(Enum): """ Types of suppression for create Maintenance Window JSON. 
Suppression is required @@ -29,7 +30,7 @@ def __repr__(self): return self.name -class Day(Enum): +class DayOfWeek(Enum): """ Day of the Week @@ -57,6 +58,7 @@ def __str__(self): def __repr__(self): return self.name + class Context(Enum): """Tag Contexts that are available""" AWS = auto() @@ -73,6 +75,8 @@ def __str__(self): def __repr__(self): return self.name + + class RecurrenceType(Enum): """Recurrence of the Maintenance Window""" DAILY = auto() @@ -86,6 +90,7 @@ def __str__(self): def __repr__(self): return self.name + class FilterType(Enum): """All Filter Types available for tag filters""" APM_SECURITY_GATEWAY = auto() @@ -187,51 +192,61 @@ def __str__(self): def __repr__(self): return self.name + def validate_datetime(datetime_text, required_format): try: datetime.datetime.strptime(datetime_text, required_format) except ValueError: raise InvalidDateFormatException(required_format) + def generate_tag_scope(tag, filter_type=None, management_zone_id=None): tag_payload = {} if management_zone_id: - tag_payload ['managementZoneId'] = str(management_zone_id) + tag_payload['mzId'] = str(management_zone_id) - if isinstance (tag, list) and len(tag) > 0: - tag_payload ['tags'] = tag - elif isinstance (tag, dict): - tag_payload ['tags'] = [tag] - elif isinstance (tag, str): - tag_payload ['tags'] = [{'context': "CONTEXTLESS",'key': tag}] + if filter_type: + if filter_type in FilterType._member_names_: + tag_payload['type'] = filter_type + else: + raise ValueError( + "Invalid Filter Type! " + + "Please Refer to Enum or Dynatrace Documentation" + ) + + if isinstance(tag, list) and len(tag) > 0: + tag_payload['tags'] = tag + elif isinstance(tag, dict): + tag_payload['tags'] = [tag] + elif isinstance(tag, str): + tag_payload['tags'] = [{'context': "CONTEXTLESS", 'key': tag}] return tag_payload -def generate_scope(entities=None, filter_type=None, management_zone_id=None, tags=None, match_any_tag=True): + +def generate_scope(entities=None, tags=None, filter_type=None, management_zone_id=None, match_any_tag=True): if entities is None: entities = [] matches = [] matches_payload = {} - # if isinstance(filter_type, str): - # matches_payload['type'] = filter_type - if match_any_tag and isinstance(tags, list) and len(tags)>1: + if match_any_tag and isinstance(tags, list) and len(tags) > 1: for tag in tags: matches.append( - generate_tag_scope( - tag, - filter_type=filter_type, - management_zone_id=management_zone_id - ) + generate_tag_scope( + tag, + filter_type=filter_type, + management_zone_id=management_zone_id + ) ) else: matches.append( - generate_tag_scope( - tags, - filter_type=filter_type, - management_zone_id=management_zone_id - ) + generate_tag_scope( + tags, + filter_type=filter_type, + management_zone_id=management_zone_id + ) ) # if isinstance(match_any_tag, bool): @@ -262,14 +277,10 @@ def generate_window_json(name, description, suppression, schedule, scope=None, i def generate_schedule(recurrence_type, start_time, duration, range_start, range_end, day=None, zoneId=None,): """Create schedule structure for maintenance window""" # This structure requires a lot of input validation - types_available = ["DAILY", "MONTHLY", "ONCE", "WEEKLY"] - days_of_week = ["FRIDAY", "MONDAY", "SATURDAY", - "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"] - recurrence_type = str(recurrence_type).upper() # Check Recurrence - if recurrence_type not in types_available: + if recurrence_type not in RecurrenceType._member_names_: raise ValueError( "Invalid Recurrence Type! 
Allowed values are: ONCE, DAILY, WEEKLY, MONTHLY") @@ -304,11 +315,11 @@ def generate_schedule(recurrence_type, start_time, duration, range_start, range_ # Check Weekly Day if recurrence_type == "WEEKLY": day = str(day).upper() - if day in days_of_week: + if day in DayOfWeek._member_names_: schedule['recurrence']['dayOfWeek'] = day else: raise ValueError("Invalid Weekly Day! Allowed values are " - + "SUNDAY, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY") + + "SUNDAY, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY") # Check Monthly Day if recurrence_type == "MONTHLY": diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index bc95ab6..5d66d5f 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -9,6 +9,7 @@ TENANT = "tenant1" URL_PATH = TenantAPIs.MAINTENANCE_WINDOWS + class TestMaintenanceWindowCreate(unittest.TestCase): """Test Cases for Creating a Maintenance Window""" REQUEST_DIR = "tests/mockserver_payloads/requests/maintenance/" @@ -43,7 +44,8 @@ def test_create_daily_no_scope(self): is_planned=True ) result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) - self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) + self.assertEqual(result, tooling_for_test.expected_payload( + mockserver_response_file)) def test_create_daily_single_tag(self): """Testing create daily Maintenance Window with a single tag scope""" @@ -64,17 +66,19 @@ def test_create_daily_single_tag(self): "2020-01-01 00:00", "2020-01-02 00:00" ) - maintenance_scope = maintenance.generate_scope(tags=[{'context': "CONTEXTLESS",'key': "testing"}]) + maintenance_scope = maintenance.generate_scope( + tags=[{'context': "CONTEXTLESS", 'key': "testing"}]) maintenance_json = maintenance.generate_window_json( "Test Payload Daily", "Generating Payload for Test", "DETECT_PROBLEMS_AND_ALERT", maintenance_schedule, - scope= maintenance_scope, + scope=maintenance_scope, is_planned=True ) result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) - self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) + self.assertEqual(result, tooling_for_test.expected_payload( + mockserver_response_file)) def test_create_daily_tags_and(self): """Testing Payloads with multiple tags in an \"AND\" configuration""" @@ -97,22 +101,23 @@ def test_create_daily_tags_and(self): "2020-01-02 00:00" ) maintenance_scope = maintenance.generate_scope( - tags=[ - {'context': "CONTEXTLESS",'key': "testing"}, - {'context': "CONTEXTLESS",'key': "testing2"} - ], - match_any_tag=False + tags=[ + {'context': "CONTEXTLESS", 'key': "testing"}, + {'context': "CONTEXTLESS", 'key': "testing2"} + ], + match_any_tag=False ) maintenance_json = maintenance.generate_window_json( "Test Payload Daily", "Generating Payload for Test", "DETECT_PROBLEMS_AND_ALERT", maintenance_schedule, - scope= maintenance_scope, + scope=maintenance_scope, is_planned=True ) result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) - self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) + self.assertEqual(result, tooling_for_test.expected_payload( + mockserver_response_file)) def test_create_daily_tags_or(self): """Testing Payloads with multiple tags in an \"AND\" configuration""" @@ -135,22 +140,53 @@ def test_create_daily_tags_or(self): "2020-01-02 00:00" ) maintenance_scope = maintenance.generate_scope( - tags=[ - {'context': "CONTEXTLESS",'key': "testing"}, - {'context': 
"CONTEXTLESS",'key': "testing2"} - ], - match_any_tag=True + tags=[ + {'context': "CONTEXTLESS", 'key': "testing"}, + {'context': "CONTEXTLESS", 'key': "testing2"} + ], + match_any_tag=True ) maintenance_json = maintenance.generate_window_json( "Test Payload Daily", "Generating Payload for Test", "DETECT_PROBLEMS_AND_ALERT", maintenance_schedule, - scope= maintenance_scope, + scope=maintenance_scope, is_planned=True ) result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) - self.assertEqual(result, tooling_for_test.expected_payload(mockserver_response_file)) + self.assertEqual(result, tooling_for_test.expected_payload( + mockserver_response_file)) + if __name__ == '__main__': unittest.main() + +# CREATE TESTS LEFT: +# ONCE TEST +# WEEKLY TEST +# MONTHLY TEST + +# Single Entity +# Multi Entity +# Single Tag with Filter Type +# Mutli Tags with Filter Type +# Single Tag with Management Zone +# Multi Tags with Management Zone + +# EXCEPTION TEST CASES: +# INVALID RECURRENCE +# INVALID WEEK DAY +# INVALID MONTH DAY +# WEEK DAY NOT SUPPLIED +# MONTH DAY NOT SUPPLIED +# MONTHLY DAY OUT OF SCOPE (31 in 30 day month) +# INVALID FILTER_TYPE +# MANAGEMENT_ZONE WITHOUT TAG +# FILTER_TYPE WITHOUT TAG + +# OTHER TEST CASES: +# GET ALL WINDOWS +# GET DETAILS OF WINDOW +# DELETE WINDOW +# UPDATE WINDOW From 981da4d72cbdf82f2fbc2cdac00de370358602ac Mon Sep 17 00:00:00 2001 From: Radu Stefan Date: Sun, 9 Aug 2020 14:33:41 +0100 Subject: [PATCH 57/79] simplified mockserver payloads; updated details as expected by circleCI --- .../responses/processes/get_all_pgis.json | 666 +----------------- .../responses/processes/get_one_pgi.json | 36 +- tests/test_processes.py | 6 +- 3 files changed, 8 insertions(+), 700 deletions(-) diff --git a/tests/mockserver_payloads/responses/processes/get_all_pgis.json b/tests/mockserver_payloads/responses/processes/get_all_pgis.json index 81d097d..121da81 100644 --- a/tests/mockserver_payloads/responses/processes/get_all_pgis.json +++ b/tests/mockserver_payloads/responses/processes/get_all_pgis.json @@ -1,669 +1,11 @@ [ { - "entityId": "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE", - "displayName": "Code*Service.exe", - "discoveredName": "Code*Service.exe", - "firstSeenTimestamp": 1595746260000, - "lastSeenTimestamp": 1596659400000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-859E1549052CD876" - ], - "isNetworkClientOf": [ - "PROCESS_GROUP_INSTANCE-402BEC123CA7FA83" - ] - }, - "toRelationships": { - "runsOnProcessGroupInstance": [ - "SERVICE-C12BF59DA3B51679", - "SERVICE-B71ADA892013D156" - ] - }, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Code42\\Code42Service.exe" - ], - "executables": [ - "Code*Service.exe" - ], - "executablePaths": [ - "C:/Program Files/Code*/Code*Service.exe", - "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" - ] - }, - "softwareTechnologies": [ - { - "type": "APACHE_HTTP_CLIENT_SYNC", - "edition": null, - "version": "4.5.2" - }, - { - "type": "SQLITE", - "edition": null, - "version": null - }, - { - "type": "JETTY", - "edition": null, - "version": "9.4.27.v20200227" - }, - { - "type": "JAVA", - "edition": "OpenJDK", - "version": "11.0.4" - } - ], - "listenPorts": [ - 4244 - ], - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "ON", - "expectedMonitoringState": "ON", - "restartRequired": true - }, - "agentVersions": [ - { - "major": 1, - "minor": 199, - "revision": 28, - "timestamp": "20200723-141750", - "sourceRevision": "" - 
} - ] + "entityId": "PROCESS_GROUP_INSTANCE-ABC123DEF456GHI7" }, { - "entityId": "PROCESS_GROUP_INSTANCE-A6AAFEA17E6F60FD", - "displayName": "Code.exe", - "discoveredName": "Code.exe", - "firstSeenTimestamp": 1593608520000, - "lastSeenTimestamp": 1596564450419, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-19DACA5E22637C33" - ] - }, - "toRelationships": {}, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Microsoft\\ VS\\ Code\\Code.exe -n" - ], - "executables": [ - "Code.exe" - ], - "executablePaths": [ - "C:/Program Files/Microsoft VS Code/Code.exe" - ] - }, - "softwareTechnologies": [ - { - "type": "CLR", - "edition": "FullCLR", - "version": "4.8.4180.0" - }, - { - "type": "DOTNET", - "edition": ".NET Framework", - "version": "4.8.4180.0" - } - ], - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "ON", - "restartRequired": false - } + "entityId": "PROCESS_GROUP_INSTANCE-A6AAFEA17E6F60FD" }, { - "entityId": "PROCESS_GROUP_INSTANCE-F0967E6BFEE20424", - "displayName": "CodeHelper.exe", - "discoveredName": "CodeHelper.exe", - "firstSeenTimestamp": 1593608520000, - "lastSeenTimestamp": 1596659400000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-2DADD604A183AF95" - ] - }, - "toRelationships": {}, - "metadata": { - "commandLineArgs": [ - ], - "executables": [ - "CodeHelper.exe" - ], - "executablePaths": [ - ] - }, - "softwareTechnologies": [ - { - "type": "CLR", - "edition": "FullCLR", - "version": "4.8.4180.0" - }, - { - "type": "DOTNET", - "edition": ".NET Framework", - "version": "4.8.4180.0" - } - ], - "bitness": "32bit", - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "OFF", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-C7BEFD2A8F523A60", - "displayName": "ConnectivityDiagnosis.exe", - "discoveredName": "ConnectivityDiagnosis.exe", - "firstSeenTimestamp": 1593679650232, - "lastSeenTimestamp": 1596627719999, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-FA40BCF5F71DC6D8" - ] - }, - "toRelationships": {}, - "metadata": { - "commandLineArgs": [ - "C:\\WINDOWS\\CCM\\ConnectivityDiagnosis.exe" - ], - "executables": [ - "ConnectivityDiagnosis.exe" - ], - "executablePaths": [ - "C:/WINDOWS/CCM/ConnectivityDiagnosis.exe", - "C:\\WINDOWS\\CCM\\CONNECTIVITYDIAGNOSIS.EXE" - ] - }, - "softwareTechnologies": [ - { - "type": "CLR", - "edition": "FullCLR", - "version": "4.8.4180.0" - }, - { - "type": "DOTNET", - "edition": ".NET Framework", - "version": "4.8.4180.0" - } - ], - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "ON", - "restartRequired": true - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-A5B3318E9D88975D", - "displayName": "DellCommandUpdate.exe", - "discoveredName": "DellCommandUpdate.exe", - "firstSeenTimestamp": 1594030620000, - "lastSeenTimestamp": 1596659400000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-30E1CF56034D1657" - ] - }, - "toRelationships": {}, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\ (x86)\\Dell\\CommandUpdate\\DellCommandUpdate.exe NewUpdatesReadyToApply" - ], - "executables": [ - "DellCommandUpdate.exe" - ], - "executablePaths": [ 
- "C:/Program Files (x*)/Dell/CommandUpdate/DellCommandUpdate.exe", - "C:\\PROGRAM FILES (X86)\\DELL\\COMMANDUPDATE\\DELLCOMMANDUPDATE.EXE" - ] - }, - "softwareTechnologies": [ - { - "type": "CLR", - "edition": "FullCLR", - "version": "4.8.4180.0" - }, - { - "type": "DOTNET", - "edition": ".NET Framework", - "version": "4.8.4180.0" - } - ], - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "OFF", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-F131DA558F8051F9", - "displayName": "Docker Desktop Installer.exe", - "discoveredName": "Docker Desktop Installer.exe", - "firstSeenTimestamp": 1596565440000, - "lastSeenTimestamp": 1596570839999, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-C340A62CDF763878" - ] - }, - "toRelationships": {}, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Docker\\Docker\\Docker\\ Desktop\\ Installer.exe check-for-update" - ], - "executables": [ - "Docker Desktop Installer.exe" - ], - "executablePaths": [ - "C:/Program Files/Docker/Docker/Docker Desktop Installer.exe", - "C:\\PROGRAM FILES\\DOCKER\\DOCKER\\DOCKER DESKTOP INSTALLER.EXE" - ] - }, - "softwareTechnologies": [ - { - "type": "CLR", - "edition": "FullCLR", - "version": "4.8.4180.0" - }, - { - "type": "DOTNET", - "edition": ".NET Framework", - "version": "4.8.4180.0" - } - ], - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "ON", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-8BBC7F9C695E6480", - "displayName": "Docker Desktop.exe", - "discoveredName": "Docker Desktop.exe", - "firstSeenTimestamp": 1596565439999, - "lastSeenTimestamp": 1596565439999, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-1688581C565DEABE" - ] - }, - "toRelationships": {}, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Docker\\Docker\\Docker\\ Desktop.exe" - ], - "executables": [ - "Docker Desktop.exe" - ], - "executablePaths": [ - "C:/Program Files/Docker/Docker/Docker Desktop.exe", - "C:\\PROGRAM FILES\\DOCKER\\DOCKER\\DOCKER DESKTOP.EXE" - ] - }, - "softwareTechnologies": [ - { - "type": "CLR", - "edition": "FullCLR", - "version": "4.8.4180.0" - }, - { - "type": "DOTNET", - "edition": ".NET Framework", - "version": "4.8.4180.0" - } - ], - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "ON", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-402BEC123CA7FA83", - "displayName": "Dynatrace ActiveGate", - "discoveredName": "Dynatrace ActiveGate", - "firstSeenTimestamp": 1594791720000, - "lastSeenTimestamp": 1596464730209, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-1BB99004B1F22491" - ], - "isInstanceOf": [ - "PROCESS_GROUP-5B927C168D55DE12" - ] - }, - "toRelationships": { - "isNetworkClientOf": [ - "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE", - "PROCESS_GROUP_INSTANCE-BBA4AAF390993A8D", - "PROCESS_GROUP_INSTANCE-E42674B38558C0CF", - "PROCESS_GROUP_INSTANCE-E8BD469415E3A0AE", - "PROCESS_GROUP_INSTANCE-31B7F45EF1965AD7", - "PROCESS_GROUP_INSTANCE-0257C724F074BDCE", - "PROCESS_GROUP_INSTANCE-60C0DE2F26F0E308", - "PROCESS_GROUP_INSTANCE-7F1EFFDC6C1F29CB", - "PROCESS_GROUP_INSTANCE-1FE3191C7C750153", - "PROCESS_GROUP_INSTANCE-C063F464B1BC2B07" - ] - }, - "metadata": { - 
"commandLineArgs": [ - "/opt/dynatrace/gateway/jre/bin/java -Dcom.compuware.apm.WatchDogPort=50006 -classpath /opt/dynatrace/gateway/lib/* -Xms1024M -XX:ErrorFile=/var/log/dynatrace/gateway/hs_err_pid_%p.log -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=60 -Duser.language=en -Dcom.compuware.apm.debug.webserver=true -Djava.util.logging.manager=com.compuware.apm.logging.impl.backend.CustomShutdownLogManager -Djdk.tls.ephemeralDHKeySize=2048 -ea -Dorg.xerial.snappy.lib.path=/opt/dynatrace/gateway/lib/native -Dorg.xerial.snappy.lib.name=libsnappyjava.so -Dcom.compuware.apm.WatchDogTimeout=180 com.compuware.apm.collector.core.CollectorImpl -CONFIG_DIR /var/lib/dynatrace/gateway/config" - ], - "hostGroups": [ - "linux_plugins" - ], - "executables": [ - "java" - ], - "javaMainClasses": [ - "com.compuware.apm.collector.core.CollectorImpl" - ], - "executablePaths": [ - "/opt/dynatrace/gateway/jre/bin/java" - ] - }, - "softwareTechnologies": [ - { - "type": "APMNG", - "edition": "OpenJDK", - "version": "1.8.0_252" - } - ], - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "OFF", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-1AA2930908C82AF5", - "displayName": "GfxDownloadWrapper.exe", - "discoveredName": "GfxDownloadWrapper.exe", - "firstSeenTimestamp": 1594743060000, - "lastSeenTimestamp": 1596627660000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-E83BBCE6EA416069" - ] - }, - "toRelationships": {}, - "metadata": { - "commandLineArgs": [ - ], - "executables": [ - "GfxDownloadWrapper.exe" - ], - "executablePaths": [ - ] - }, - "softwareTechnologies": [ - { - "type": "CLR", - "edition": "FullCLR", - "version": "4.8.4180.0" - }, - { - "type": "DOTNET", - "edition": ".NET Framework", - "version": "4.8.4180.0" - } - ], - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "ON", - "restartRequired": true - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-792BE86953F73281", - "displayName": "GlobalProtect service", - "discoveredName": "GlobalProtect service", - "firstSeenTimestamp": 1593603300000, - "lastSeenTimestamp": 1596659400000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-19A66884085CCFE1" - ] - }, - "toRelationships": {}, - "metadata": { - "executables": [ - "PanGPS.exe" - ], - "executablePaths": [ - "C:\\PROGRAM FILES\\PALO ALTO NETWORKS\\GLOBALPROTECT\\PANGPS.EXE" - ] - }, - "listenPorts": [ - 4767 - ], - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "ON", - "expectedMonitoringState": "OFF", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-979851F08FB75636", - "displayName": "Google Chrome", - "discoveredName": "Google Chrome", - "firstSeenTimestamp": 1593605280000, - "lastSeenTimestamp": 1596659400000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-1DB7168BDCE9CE4B" - ], - "isNetworkClientOf": [ - "PROCESS_GROUP_INSTANCE-D18F46A3E5EEF8C7", - "PROCESS_GROUP_INSTANCE-148467FDC40B7504" - ] - }, - "toRelationships": {}, - "metadata": { - "executables": [ - "chrome.exe" - ], - "executablePaths": [ - "C:\\PROGRAM FILES (X86)\\GOOGLE\\CHROME\\APPLICATION\\CHROME.EXE" - ] - }, - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "ON", - 
"expectedMonitoringState": "OFF", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-2D90C092D03BD6B9", - "displayName": "Host Process for Microsoft Configuration Manager", - "discoveredName": "Host Process for Microsoft Configuration Manager", - "firstSeenTimestamp": 1593610860000, - "lastSeenTimestamp": 1596636960000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-5D9A038AD1100883" - ] - }, - "toRelationships": {}, - "metadata": { - "executables": [ - "CcmExec.exe" - ], - "executablePaths": [ - "C:\\WINDOWS\\CCM\\CCMEXEC.EXE" - ] - }, - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "ON", - "expectedMonitoringState": "OFF", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-46AE3D05C7012909", - "displayName": "Insomnia", - "discoveredName": "Insomnia", - "firstSeenTimestamp": 1596656520000, - "lastSeenTimestamp": 1596656640000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-C400D5D1212102C4" - ] - }, - "toRelationships": {}, - "metadata": { - "executables": [ - "Insomnia.exe" - ], - "executablePaths": [ - "C:\\USERS\\RADU.STEFAN\\APPDATA\\LOCAL\\INSOMNIA\\APP-2020.3.3\\INSOMNIA.EXE" - ] - }, - "bitness": "64bit", - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "OFF", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-AD53BA0E5F9FB61D", - "displayName": "Linux System", - "discoveredName": "Linux System", - "firstSeenTimestamp": 1596279540000, - "lastSeenTimestamp": 1596659640000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-FFC515D2848C762E" - ], - "isInstanceOf": [ - "PROCESS_GROUP-EF1E81586EB8EDD6" - ] - }, - "toRelationships": {}, - "metadata": { - "executables": [ - "kthreadd" - ], - "executablePaths": [ - "kthreadd" - ] - }, - "softwareTechnologies": [ - { - "type": "LINUX_SYSTEM", - "edition": null, - "version": null - } - ], - "monitoringState": { - "actualMonitoringState": "ON", - "expectedMonitoringState": "OFF", - "restartRequired": false - } - }, - { - "entityId": "PROCESS_GROUP_INSTANCE-9CD5F00A9D5B1DD7", - "displayName": "Linux System", - "discoveredName": "Linux System", - "firstSeenTimestamp": 1594791720000, - "lastSeenTimestamp": 1596485100000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-1BB99004B1F22491" - ], - "isInstanceOf": [ - "PROCESS_GROUP-C4E413AE6C59611D" - ] - }, - "toRelationships": {}, - "metadata": { - "hostGroups": [ - "linux_plugins" - ], - "executables": [ - "kthreadd" - ], - "executablePaths": [ - "kthreadd" - ] - }, - "softwareTechnologies": [ - { - "type": "LINUX_SYSTEM", - "edition": null, - "version": null - } - ], - "monitoringState": { - "actualMonitoringState": "OFF", - "expectedMonitoringState": "OFF", - "restartRequired": false - } + "entityId": "PROCESS_GROUP_INSTANCE-F0967E6BFEE20424" } - ] \ No newline at end of file +] \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/processes/get_one_pgi.json b/tests/mockserver_payloads/responses/processes/get_one_pgi.json index 1eff28d..0898df1 100644 --- a/tests/mockserver_payloads/responses/processes/get_one_pgi.json +++ b/tests/mockserver_payloads/responses/processes/get_one_pgi.json @@ -1,37 +1,3 @@ { - "entityId": "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE", - "displayName": "Code*Service.exe", - "discoveredName": "Code*Service.exe", - 
"firstSeenTimestamp": 1595746260000, - "lastSeenTimestamp": 1596659400000, - "tags": [], - "fromRelationships": { - "isProcessOf": [ - "HOST-80AA2D475F709672" - ], - "isInstanceOf": [ - "PROCESS_GROUP-859E1549052CD876" - ], - "isNetworkClientOf": [ - "PROCESS_GROUP_INSTANCE-402BEC123CA7FA83" - ] - }, - "toRelationships": { - "runsOnProcessGroupInstance": [ - "SERVICE-C12BF59DA3B51679", - "SERVICE-B71ADA892013D156" - ] - }, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Code42\\Code42Service.exe" - ], - "executables": [ - "Code*Service.exe" - ], - "executablePaths": [ - "C:/Program Files/Code*/Code*Service.exe", - "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" - ] - } + "entityId": "PROCESS_GROUP_INSTANCE-ABC123DEF456GHI7" } \ No newline at end of file diff --git a/tests/test_processes.py b/tests/test_processes.py index d78a4f2..336f9c7 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -6,8 +6,8 @@ from dynatrace.requests.request_handler import TenantAPIs from dynatrace.tenant.topology import process -cluster = FULL_SET.get('mock_cluster') -tenant = 'mock_tenant' +cluster = FULL_SET.get('mockcluster1') +tenant = 'tenant1' url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/processes" request_dir = "tests/mockserver_payloads/requests/processes" response_dir = "tests/mockserver_payloads/responses/processes" @@ -34,7 +34,7 @@ def test_get_all_processes(self): def test_get_single_process(self): """Tests getting one specific process.""" response_file = f"{response_dir}/get_one_pgi.json" - process_id = "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" + process_id = "PROCESS_GROUP_INSTANCE-ABC123DEF456GHI7" testtools.create_mockserver_expectation( cluster=cluster, From 1d81998d7a02beb0fb61b1fc4672849647710234 Mon Sep 17 00:00:00 2001 From: Radu Stefan Date: Sun, 9 Aug 2020 14:44:35 +0100 Subject: [PATCH 58/79] simplified mockserver payloads; changed connection details to circleci expectations --- .../responses/processes/get_all_pgs.json | 203 +----------------- .../responses/processes/get_one_pg.json | 61 +----- tests/test_process_groups.py | 6 +- 3 files changed, 7 insertions(+), 263 deletions(-) diff --git a/tests/mockserver_payloads/responses/processes/get_all_pgs.json b/tests/mockserver_payloads/responses/processes/get_all_pgs.json index 12ad4cc..73e086a 100644 --- a/tests/mockserver_payloads/responses/processes/get_all_pgs.json +++ b/tests/mockserver_payloads/responses/processes/get_all_pgs.json @@ -1,208 +1,11 @@ [ { - "entityId": "PROCESS_GROUP-859E1549052CD876", - "displayName": "Code*Service.exe", - "discoveredName": "Code*Service.exe", - "firstSeenTimestamp": 1595746300858, - "lastSeenTimestamp": 1596661825512, - "tags": [], - "fromRelationships": { - "isNetworkClientOfProcessGroup": [ - "PROCESS_GROUP-5B927C168D55DE12" - ], - "runsOn": [ - "HOST-80AA2D475F709672" - ] - }, - "toRelationships": { - "isInstanceOf": [ - "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" - ], - "runsOn": [ - "SERVICE-C12BF59DA3B51679", - "SERVICE-B71ADA892013D156" - ] - }, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Code42\\Code42Service.exe" - ], - "executables": [ - "Code*Service.exe" - ], - "executablePaths": [ - "C:/Program Files/Code*/Code*Service.exe", - "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" - ] - }, - "softwareTechnologies": [ - { - "type": "APACHE_HTTP_CLIENT_SYNC", - "edition": null, - "version": "4.5.2" - }, - { - "type": "JETTY", - "edition": null, - "version": "9.4.27.v20200227" - }, - { - "type": "JAVA", - "edition": "OpenJDK", - "version": "11.0.4" - 
}, - { - "type": "SQLITE", - "edition": null, - "version": null - } - ], - "listenPorts": [ - 4244 - ] + "entityId": "PROCESS_GROUP-ABC123DEF456GHI7" }, { - "entityId": "PROCESS_GROUP-19DACA5E22637C33", - "displayName": "Code.exe", - "discoveredName": "Code.exe", - "firstSeenTimestamp": 1592585950992, - "lastSeenTimestamp": 1596564482129, - "tags": [], - "fromRelationships": { - "runsOn": [ - "HOST-80AA2D475F709672" - ] - }, - "toRelationships": { - "isInstanceOf": [ - "PROCESS_GROUP_INSTANCE-A6AAFEA17E6F60FD" - ] - }, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Microsoft\\ VS\\ Code\\Code.exe -n" - ], - "executables": [ - "Code.exe" - ], - "executablePaths": [ - "C:/Program Files/Microsoft VS Code/Code.exe" - ] - }, - "softwareTechnologies": [ - { - "type": "CLR", - "edition": "FullCLR", - "version": "4.8.4180.0" - }, - { - "type": "DOTNET", - "edition": ".NET Framework", - "version": "4.8.4180.0" - } - ] + "entityId": "PROCESS_GROUP-19DACA5E22637C33" }, { - "entityId": "PROCESS_GROUP-859E1549052CD876", - "displayName": "Code*Service.exe", - "discoveredName": "Code*Service.exe", - "firstSeenTimestamp": 1595746300858, - "lastSeenTimestamp": 1596661825512, - "tags": [], - "fromRelationships": { - "isNetworkClientOfProcessGroup": [ - "PROCESS_GROUP-5B927C168D55DE12" - ], - "runsOn": [ - "HOST-80AA2D475F709672" - ] - }, - "toRelationships": { - "isInstanceOf": [ - "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" - ], - "runsOn": [ - "SERVICE-C12BF59DA3B51679", - "SERVICE-B71ADA892013D156" - ] - }, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Code42\\Code42Service.exe" - ], - "executables": [ - "Code*Service.exe" - ], - "executablePaths": [ - "C:/Program Files/Code*/Code*Service.exe", - "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" - ] - }, - "softwareTechnologies": [ - { - "type": "APACHE_HTTP_CLIENT_SYNC", - "edition": null, - "version": "4.5.2" - }, - { - "type": "JETTY", - "edition": null, - "version": "9.4.27.v20200227" - }, - { - "type": "JAVA", - "edition": "OpenJDK", - "version": "11.0.4" - }, - { - "type": "SQLITE", - "edition": null, - "version": null - } - ], - "listenPorts": [ - 4244 - ] - }, - { - "entityId": "PROCESS_GROUP-19DACA5E22637C33", - "displayName": "Code.exe", - "discoveredName": "Code.exe", - "firstSeenTimestamp": 1592585950992, - "lastSeenTimestamp": 1596564482129, - "tags": [], - "fromRelationships": { - "runsOn": [ - "HOST-80AA2D475F709672" - ] - }, - "toRelationships": { - "isInstanceOf": [ - "PROCESS_GROUP_INSTANCE-A6AAFEA17E6F60FD" - ] - }, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Microsoft\\ VS\\ Code\\Code.exe -n" - ], - "executables": [ - "Code.exe" - ], - "executablePaths": [ - "C:/Program Files/Microsoft VS Code/Code.exe" - ] - }, - "softwareTechnologies": [ - { - "type": "CLR", - "edition": "FullCLR", - "version": "4.8.4180.0" - }, - { - "type": "DOTNET", - "edition": ".NET Framework", - "version": "4.8.4180.0" - } - ] + "entityId": "PROCESS_GROUP-859E1549052CD876" } ] \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/processes/get_one_pg.json b/tests/mockserver_payloads/responses/processes/get_one_pg.json index 882af98..14223a1 100644 --- a/tests/mockserver_payloads/responses/processes/get_one_pg.json +++ b/tests/mockserver_payloads/responses/processes/get_one_pg.json @@ -1,62 +1,3 @@ { - "entityId": "PROCESS_GROUP-859E1549052CD876", - "displayName": "Code*Service.exe", - "discoveredName": "Code*Service.exe", - "firstSeenTimestamp": 1595746300858, - "lastSeenTimestamp": 
1596661825512, - "tags": [], - "fromRelationships": { - "isNetworkClientOfProcessGroup": [ - "PROCESS_GROUP-5B927C168D55DE12" - ], - "runsOn": [ - "HOST-80AA2D475F709672" - ] - }, - "toRelationships": { - "isInstanceOf": [ - "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" - ], - "runsOn": [ - "SERVICE-C12BF59DA3B51679", - "SERVICE-B71ADA892013D156" - ] - }, - "metadata": { - "commandLineArgs": [ - "C:\\Program\\ Files\\Code42\\Code42Service.exe" - ], - "executables": [ - "Code*Service.exe" - ], - "executablePaths": [ - "C:/Program Files/Code*/Code*Service.exe", - "C:\\PROGRAM FILES\\CODE42\\CODE42SERVICE.EXE" - ] - }, - "softwareTechnologies": [ - { - "type": "APACHE_HTTP_CLIENT_SYNC", - "edition": null, - "version": "4.5.2" - }, - { - "type": "JETTY", - "edition": null, - "version": "9.4.27.v20200227" - }, - { - "type": "JAVA", - "edition": "OpenJDK", - "version": "11.0.4" - }, - { - "type": "SQLITE", - "edition": null, - "version": null - } - ], - "listenPorts": [ - 4244 - ] + "entityId": "PROCESS_GROUP-ABC123DEF456GHI7" } \ No newline at end of file diff --git a/tests/test_process_groups.py b/tests/test_process_groups.py index bf78f9d..bf08efe 100644 --- a/tests/test_process_groups.py +++ b/tests/test_process_groups.py @@ -6,8 +6,8 @@ from dynatrace.requests.request_handler import TenantAPIs from dynatrace.tenant.topology import process_groups -cluster = FULL_SET.get('mock_cluster') -tenant = 'mock_tenant' +cluster = FULL_SET.get('mockserver1') +tenant = 'tenant1' url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/process-groups" request_dir = "tests/mockserver_payloads/requests/processes" response_dir = "tests/mockserver_payloads/responses/processes" @@ -34,7 +34,7 @@ def test_get_all_pgs(self): def test_get_single_pg(self): """Test fetching single PG""" response_file = f"{response_dir}/get_one_pg.json" - pg_id = "PROCESS_GROUP-859E1549052CD876" + pg_id = "PROCESS_GROUP-ABC123DEF456GHI7" testtools.create_mockserver_expectation( cluster=cluster, From a066f4cc21e028152057013a2cbfeb7e8f298860 Mon Sep 17 00:00:00 2001 From: Radu Stefan Date: Sun, 9 Aug 2020 14:45:49 +0100 Subject: [PATCH 59/79] changed mockcluster1 to mockserver1 to comply with circleci test details --- tests/test_processes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_processes.py b/tests/test_processes.py index 336f9c7..2377c99 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -6,7 +6,7 @@ from dynatrace.requests.request_handler import TenantAPIs from dynatrace.tenant.topology import process -cluster = FULL_SET.get('mockcluster1') +cluster = FULL_SET.get('mockserver1') tenant = 'tenant1' url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/processes" request_dir = "tests/mockserver_payloads/requests/processes" From 238685bed590fbdb9e921169fd540d488835ce07 Mon Sep 17 00:00:00 2001 From: Radu Stefan Date: Sun, 9 Aug 2020 14:47:53 +0100 Subject: [PATCH 60/79] simplified mockserver payloads --- .../responses/services/get_all.json | 134 +----------------- .../responses/services/get_one.json | 50 +------ 2 files changed, 4 insertions(+), 180 deletions(-) diff --git a/tests/mockserver_payloads/responses/services/get_all.json b/tests/mockserver_payloads/responses/services/get_all.json index 42c6e27..bdcce80 100644 --- a/tests/mockserver_payloads/responses/services/get_all.json +++ b/tests/mockserver_payloads/responses/services/get_all.json @@ -1,139 +1,11 @@ [ { - "entityId": "SERVICE-C12BF59DA3B51679", - "displayName": "/", - "discoveredName": "/", - "firstSeenTimestamp": 1595746255774, - 
"lastSeenTimestamp": 1596699715160, - "tags": [], - "fromRelationships": { - "runsOnProcessGroupInstance": [ - "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" - ], - "runsOn": [ - "PROCESS_GROUP-859E1549052CD876" - ] - }, - "toRelationships": { - "calls": [ - "SERVICE-B71ADA892013D156" - ] - }, - "agentTechnologyType": "JAVA", - "serviceTechnologyTypes": [ - "Java" - ], - "serviceType": "WebRequest", - "softwareTechnologies": [ - { - "type": "APACHE_HTTP_CLIENT_SYNC", - "edition": null, - "version": "4.5.2" - }, - { - "type": "JETTY", - "edition": null, - "version": "9.4.27.v20200227" - }, - { - "type": "JAVA", - "edition": "OpenJDK", - "version": "11.0.4" - }, - { - "type": "SQLITE", - "edition": null, - "version": null - } - ], - "webApplicationId": "/", - "webServerName": "localhost", - "contextRoot": "/" + "entityId": "SERVICE-ABC123DEF456GHI7" }, { - "entityId": "SERVICE-C096CE0BA471AEFD", - "displayName": "Netty on 0:0:0:0:0:0:0:0:*", - "discoveredName": "Netty on 0:0:0:0:0:0:0:0:*", - "firstSeenTimestamp": 1596565951607, - "lastSeenTimestamp": 1596663764465, - "tags": [], - "fromRelationships": { - "runsOnProcessGroupInstance": [ - "PROCESS_GROUP_INSTANCE-148467FDC40B7504" - ], - "runsOn": [ - "PROCESS_GROUP-A6C0C543A3B775E3" - ] - }, - "toRelationships": {}, - "agentTechnologyType": "JAVA", - "serviceTechnologyTypes": [ - "Java", - "NETTY" - ], - "serviceType": "WebRequest", - "softwareTechnologies": [ - { - "type": "NETTY", - "edition": null, - "version": "4.1.50.Final" - }, - { - "type": "JAVA", - "edition": "Oracle HotSpot", - "version": "13.0.2" - } - ], - "webApplicationId": "Netty on 0:0:0:0:0:0:0:0:*", - "webServerName": "0:0:0:0:0:0:0:0:5555", - "contextRoot": "/", - "port": 5555 + "entityId": "SERVICE-C096CE0BA471AEFD" }, { - "entityId": "SERVICE-B71ADA892013D156", - "displayName": "Requests executed in background threads of Code*Service.exe", - "discoveredName": "Requests executed in background threads of Code*Service.exe", - "firstSeenTimestamp": 1595746270529, - "lastSeenTimestamp": 1596699743160, - "tags": [], - "fromRelationships": { - "runsOnProcessGroupInstance": [ - "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" - ], - "runsOn": [ - "PROCESS_GROUP-859E1549052CD876" - ], - "calls": [ - "SERVICE-C12BF59DA3B51679" - ] - }, - "toRelationships": {}, - "agentTechnologyType": "JAVA", - "serviceTechnologyTypes": [ - "Java" - ], - "serviceType": "Process", - "softwareTechnologies": [ - { - "type": "APACHE_HTTP_CLIENT_SYNC", - "edition": null, - "version": "4.5.2" - }, - { - "type": "JETTY", - "edition": null, - "version": "9.4.27.v20200227" - }, - { - "type": "JAVA", - "edition": "OpenJDK", - "version": "11.0.4" - }, - { - "type": "SQLITE", - "edition": null, - "version": null - } - ] + "entityId": "SERVICE-B71ADA892013D156" } ] \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/services/get_one.json b/tests/mockserver_payloads/responses/services/get_one.json index 8eb4e76..f5c0619 100644 --- a/tests/mockserver_payloads/responses/services/get_one.json +++ b/tests/mockserver_payloads/responses/services/get_one.json @@ -1,51 +1,3 @@ { - "entityId": "SERVICE-C12BF59DA3B51679", - "displayName": "/", - "discoveredName": "/", - "firstSeenTimestamp": 1595746255774, - "lastSeenTimestamp": 1596699715160, - "tags": [], - "fromRelationships": { - "runsOnProcessGroupInstance": [ - "PROCESS_GROUP_INSTANCE-718687D9E9D0D7CE" - ], - "runsOn": [ - "PROCESS_GROUP-859E1549052CD876" - ] - }, - "toRelationships": { - "calls": [ - "SERVICE-B71ADA892013D156" - ] - }, - 
"agentTechnologyType": "JAVA", - "serviceTechnologyTypes": [ - "Java" - ], - "serviceType": "WebRequest", - "softwareTechnologies": [ - { - "type": "APACHE_HTTP_CLIENT_SYNC", - "edition": null, - "version": "4.5.2" - }, - { - "type": "JETTY", - "edition": null, - "version": "9.4.27.v20200227" - }, - { - "type": "JAVA", - "edition": "OpenJDK", - "version": "11.0.4" - }, - { - "type": "SQLITE", - "edition": null, - "version": null - } - ], - "webApplicationId": "/", - "webServerName": "localhost", - "contextRoot": "/" + "entityId": "SERVICE-ABC123DEF456GHI7" } \ No newline at end of file From 22dd6698893cf0057698a74e1dc7c61f706da41d Mon Sep 17 00:00:00 2001 From: Radu Stefan Date: Sun, 9 Aug 2020 14:49:01 +0100 Subject: [PATCH 61/79] changed test details to comply with circleci mockserver --- tests/test_services.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_services.py b/tests/test_services.py index 068cd1f..345eb6d 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -6,8 +6,8 @@ from dynatrace.requests.request_handler import TenantAPIs from dynatrace.tenant.topology import services -cluster = FULL_SET.get('mock_cluster') -tenant = 'mock_tenant' +cluster = FULL_SET.get('mockserver1') +tenant = 'tenant1' url = f"{TenantAPIs.V1_TOPOLOGY}/services" request_dir = "tests/mockserver_payloads/requests/services" response_dir = "tests/mockserver_payloads/responses/services" @@ -34,7 +34,7 @@ def test_get_all_svc(self): def test_get_single_svc(self): """Test fetching single service""" response_file = f"{response_dir}/get_one.json" - svc_id = "SERVICE-C12BF59DA3B51679" + svc_id = "SERVICE-ABC123DEF456GHI7" testtools.create_mockserver_expectation( cluster=cluster, @@ -68,7 +68,7 @@ class TestServiceTags(unittest.TestCase): def test_add_svc_tags(self): """Test adding two tags to the service.""" - svc_id = "SERVICE-C12BF59DA3B51679" + svc_id = "SERVICE-ABC123DEF456GHI7" request_file = f"{request_dir}/tags.json" tags = ["demo", "example"] From d10ac3d4a17dbc6def37fa75e2a6c4fc5c99b97a Mon Sep 17 00:00:00 2001 From: Radu Stefan Date: Sun, 9 Aug 2020 17:43:44 +0100 Subject: [PATCH 62/79] PAF-37: simplified make_api_call function; moved Api-Token to headers instead of query string --- dynatrace/requests/request_handler.py | 34 ++++++++++----------------- 1 file changed, 12 insertions(+), 22 deletions(-) diff --git a/dynatrace/requests/request_handler.py b/dynatrace/requests/request_handler.py index b2e303b..3baf04d 100644 --- a/dynatrace/requests/request_handler.py +++ b/dynatrace/requests/request_handler.py @@ -84,7 +84,7 @@ def make_api_call(cluster, endpoint, tenant=None, params=None, json=None, method ''' Function makes an API call in a safe way, taking into account the rate limits. 
This will ensure the API call will always go through, with the program waiting for the limit to reset if needed.\n - + @param cluster - Cluster dictionary from variable_set\n @param endpoint - API endpoint to call.\n @param tenant - String of tenant name used in cluster dictionary\n @@ -101,31 +101,21 @@ def make_api_call(cluster, endpoint, tenant=None, params=None, json=None, method # Get correct token for the operation if 'onpremise' in str(endpoint) or 'cluster' in str(endpoint): - check_managed (cluster) - params['Api-Token'] = cluster['cluster_token'] + check_managed(cluster) + headers = dict(Authorization=f"Api-Token {cluster['cluster_token']}") else: - params['Api-Token'] = cluster['api_token'][tenant] + headers = dict(Authorization=f"Api-Token {cluster['api_token'][tenant]}") # Loop to retry in case of rate limits while True: - if method == HTTP.GET: - response = requests.get(url=url, - params=params, - verify=cluster.get('verify_ssl')) - elif method == HTTP.PUT: - response = requests.put(url=url, - params=params, - verify=cluster.get('verify_ssl'), - json=json) - elif method == HTTP.POST: - response = requests.post(url=url, - params=params, - verify=cluster.get('verify_ssl'), - json=json) - elif method == HTTP.DELETE: - response = requests.delete(url=url, - params=params, - verify=cluster.get('verify_ssl')) + response = requests.request( + method=str(method), + url=url, + params=params, + headers=headers, + json=json, + verify=cluster.get('verify_ssl') + ) if check_response(response): break From 039b812406fa17496ce1555b7bf171265c7d3cf9 Mon Sep 17 00:00:00 2001 From: Aaron Date: Sun, 9 Aug 2020 22:14:53 -0500 Subject: [PATCH 63/79] Adding coverage for testing breadth --- .coveragerc | 9 ++++++++ Pipfile | 1 + Pipfile.lock | 58 ++++++++++++++++++++++++++++++++++++++++++++-------- 3 files changed, 59 insertions(+), 9 deletions(-) create mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..b961959 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,9 @@ +[run] +omit = + # omit anything in a .local directory anywhere + */.local/* + # omit everything in /usr + /usr/* + # omit in the test tools + tests/* + user_variables.py \ No newline at end of file diff --git a/Pipfile b/Pipfile index b253a43..393ec6b 100644 --- a/Pipfile +++ b/Pipfile @@ -6,6 +6,7 @@ verify_ssl = true [dev-packages] pylint = "*" autopep8 = "*" +coverage = "*" [packages] requests = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 3da6634..19a4db6 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "f1850de3b2311e799288920e10d9afb1837a02c65754e827532b4f40af27ab0d" + "sha256": "d77ab23630511fa40710f418270d79d24bc9d2b8a61ab2d2af4b0e938036b609" }, "pipfile-spec": 6, "requires": { @@ -32,10 +32,10 @@ }, "idna": { "hashes": [ - "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", - "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" ], - "version": "==2.9" + "version": "==2.10" }, "requests": { "hashes": [ @@ -47,10 +47,10 @@ }, "urllib3": { "hashes": [ - "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527", - "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115" + "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", + 
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" ], - "version": "==1.25.9" + "version": "==1.25.10" } }, "develop": { @@ -63,10 +63,50 @@ }, "autopep8": { "hashes": [ - "sha256:60fd8c4341bab59963dafd5d2a566e94f547e660b9b396f772afe67d8481dbf0" + "sha256:d21d3901cb0da6ebd1e83fc9b0dfbde8b46afc2ede4fe32fbda0c7c6118ca094" ], "index": "pypi", - "version": "==1.5.3" + "version": "==1.5.4" + }, + "coverage": { + "hashes": [ + "sha256:098a703d913be6fbd146a8c50cc76513d726b022d170e5e98dc56d958fd592fb", + "sha256:16042dc7f8e632e0dcd5206a5095ebd18cb1d005f4c89694f7f8aafd96dd43a3", + "sha256:1adb6be0dcef0cf9434619d3b892772fdb48e793300f9d762e480e043bd8e716", + "sha256:27ca5a2bc04d68f0776f2cdcb8bbd508bbe430a7bf9c02315cd05fb1d86d0034", + "sha256:28f42dc5172ebdc32622a2c3f7ead1b836cdbf253569ae5673f499e35db0bac3", + "sha256:2fcc8b58953d74d199a1a4d633df8146f0ac36c4e720b4a1997e9b6327af43a8", + "sha256:304fbe451698373dc6653772c72c5d5e883a4aadaf20343592a7abb2e643dae0", + "sha256:30bc103587e0d3df9e52cd9da1dd915265a22fad0b72afe54daf840c984b564f", + "sha256:40f70f81be4d34f8d491e55936904db5c527b0711b2a46513641a5729783c2e4", + "sha256:4186fc95c9febeab5681bc3248553d5ec8c2999b8424d4fc3a39c9cba5796962", + "sha256:46794c815e56f1431c66d81943fa90721bb858375fb36e5903697d5eef88627d", + "sha256:4869ab1c1ed33953bb2433ce7b894a28d724b7aa76c19b11e2878034a4e4680b", + "sha256:4f6428b55d2916a69f8d6453e48a505c07b2245653b0aa9f0dee38785939f5e4", + "sha256:52f185ffd3291196dc1aae506b42e178a592b0b60a8610b108e6ad892cfc1bb3", + "sha256:538f2fd5eb64366f37c97fdb3077d665fa946d2b6d95447622292f38407f9258", + "sha256:64c4f340338c68c463f1b56e3f2f0423f7b17ba6c3febae80b81f0e093077f59", + "sha256:675192fca634f0df69af3493a48224f211f8db4e84452b08d5fcebb9167adb01", + "sha256:700997b77cfab016533b3e7dbc03b71d33ee4df1d79f2463a318ca0263fc29dd", + "sha256:8505e614c983834239f865da2dd336dcf9d72776b951d5dfa5ac36b987726e1b", + "sha256:962c44070c281d86398aeb8f64e1bf37816a4dfc6f4c0f114756b14fc575621d", + "sha256:9e536783a5acee79a9b308be97d3952b662748c4037b6a24cbb339dc7ed8eb89", + "sha256:9ea749fd447ce7fb1ac71f7616371f04054d969d412d37611716721931e36efd", + "sha256:a34cb28e0747ea15e82d13e14de606747e9e484fb28d63c999483f5d5188e89b", + "sha256:a3ee9c793ffefe2944d3a2bd928a0e436cd0ac2d9e3723152d6fd5398838ce7d", + "sha256:aab75d99f3f2874733946a7648ce87a50019eb90baef931698f96b76b6769a46", + "sha256:b1ed2bdb27b4c9fc87058a1cb751c4df8752002143ed393899edb82b131e0546", + "sha256:b360d8fd88d2bad01cb953d81fd2edd4be539df7bfec41e8753fe9f4456a5082", + "sha256:b8f58c7db64d8f27078cbf2a4391af6aa4e4767cc08b37555c4ae064b8558d9b", + "sha256:c1bbb628ed5192124889b51204de27c575b3ffc05a5a91307e7640eff1d48da4", + "sha256:c2ff24df02a125b7b346c4c9078c8936da06964cc2d276292c357d64378158f8", + "sha256:c890728a93fffd0407d7d37c1e6083ff3f9f211c83b4316fae3778417eab9811", + "sha256:c96472b8ca5dc135fb0aa62f79b033f02aa434fb03a8b190600a5ae4102df1fd", + "sha256:ce7866f29d3025b5b34c2e944e66ebef0d92e4a4f2463f7266daa03a1332a651", + "sha256:e26c993bd4b220429d4ec8c1468eca445a4064a61c74ca08da7429af9bc53bb0" + ], + "index": "pypi", + "version": "==5.2.1" }, "isort": { "hashes": [ From b7ebcaa32ac1df6d3a9266b0521d6a2ffd51375b Mon Sep 17 00:00:00 2001 From: Aaron Date: Sun, 9 Aug 2020 22:15:31 -0500 Subject: [PATCH 64/79] PAF-21 #Adding Enum Tests --- tests/test_maintenance_windows.py | 41 +++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index 5d66d5f..a736ab9 100644 --- 
a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -158,6 +158,47 @@ def test_create_daily_tags_or(self): self.assertEqual(result, tooling_for_test.expected_payload( mockserver_response_file)) +class TestEnumTypes(unittest.TestCase): + def test_suppression_enum_str(self): + suppression = maintenance.Suppression(maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT) + self.assertIsInstance(maintenance.Suppression.__str__(suppression), str) + + def test_suppression_enum_repr(self): + suppression = maintenance.Suppression(maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT) + self.assertIsInstance(maintenance.Suppression.__repr__(suppression), str) + + def test_day_of_week_enum_str(self): + day_of_week = maintenance.DayOfWeek(maintenance.DayOfWeek.MONDAY) + self.assertIsInstance(maintenance.DayOfWeek.__str__(day_of_week), str) + + def test_day_of_week_enum_repr(self): + day_of_week = maintenance.DayOfWeek(maintenance.DayOfWeek.MONDAY) + self.assertIsInstance(maintenance.DayOfWeek.__repr__(day_of_week), str) + + def test_context_enum_str(self): + context = maintenance.Context(maintenance.Context.CONTEXTLESS) + self.assertIsInstance(maintenance.Context.__str__(context), str) + + def test_context_enum_repr(self): + context = maintenance.Context(maintenance.Context.CONTEXTLESS) + self.assertIsInstance(maintenance.Context.__repr__(context), str) + + def test_recurrence_type_enum_str(self): + recurrence_type = maintenance.RecurrenceType(maintenance.RecurrenceType.DAILY) + self.assertIsInstance(maintenance.RecurrenceType.__str__(recurrence_type), str) + + def test_recurrence_type_enum_repr(self): + recurrence_type = maintenance.RecurrenceType(maintenance.RecurrenceType.DAILY) + self.assertIsInstance(maintenance.RecurrenceType.__repr__(recurrence_type), str) + + def test_filter_type_enum_str(self): + suppression = maintenance.FilterType(maintenance.FilterType.APM_SECURITY_GATEWAY) + self.assertIsInstance(maintenance.FilterType.__str__(suppression), str) + + def test_filter_type_enum_repr(self): + suppression = maintenance.FilterType(maintenance.FilterType.APM_SECURITY_GATEWAY) + self.assertIsInstance(maintenance.FilterType.__repr__(suppression), str) + if __name__ == '__main__': unittest.main() From 177da3a728c2fa07e22e842be83e5eb6f2a31812 Mon Sep 17 00:00:00 2001 From: Aaron Date: Sun, 9 Aug 2020 22:16:56 -0500 Subject: [PATCH 65/79] Removing commented code --- dynatrace/tenant/maintenance.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/dynatrace/tenant/maintenance.py b/dynatrace/tenant/maintenance.py index 4d88bc0..fb58b88 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -249,10 +249,6 @@ def generate_scope(entities=None, tags=None, filter_type=None, management_zone_i ) ) - # if isinstance(match_any_tag, bool): - # matches_payload['tagsCombination'] = "OR" if match_any_tag \ - # else "AND" - scope = { 'entities': entities, 'matches': matches From eaddf3fd2052e20fbb157d548ba5fd3f696855ba Mon Sep 17 00:00:00 2001 From: Aaron Date: Sun, 9 Aug 2020 22:46:04 -0500 Subject: [PATCH 66/79] PAF-21 #Added Recurrance Types. 
Refactored Payload --- .../maintenance/mock_create_daily_1.json | 2 +- .../mock_create_daily_multi_tags_and_1.json | 2 +- .../mock_create_daily_multi_tags_or_1.json | 2 +- .../mock_create_daily_single_tag_1.json | 2 +- .../maintenance/mock_create_monthly_1.json | 17 +++ .../maintenance/mock_create_once_1.json | 12 ++ .../maintenance/mock_create_weekly_1.json | 17 +++ ...create_daily_1.json => mock_create_1.json} | 2 +- tests/test_maintenance_windows.py | 116 ++++++++++++++++-- 9 files changed, 157 insertions(+), 15 deletions(-) create mode 100644 tests/mockserver_payloads/requests/maintenance/mock_create_monthly_1.json create mode 100644 tests/mockserver_payloads/requests/maintenance/mock_create_once_1.json create mode 100644 tests/mockserver_payloads/requests/maintenance/mock_create_weekly_1.json rename tests/mockserver_payloads/responses/maintenance/{mock_create_daily_1.json => mock_create_1.json} (75%) diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_1.json index d740f54..97a5743 100644 --- a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_1.json +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_1.json @@ -1,5 +1,5 @@ { - "name":"Test Payload Daily", + "name":"Test Payload", "description":"Generating Payload for Test", "suppression":"DETECT_PROBLEMS_AND_ALERT", "schedule":{ diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_and_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_and_1.json index c15fe1f..7299850 100644 --- a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_and_1.json +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_and_1.json @@ -1,5 +1,5 @@ { - "name": "Test Payload Daily", + "name": "Test Payload", "description": "Generating Payload for Test", "suppression": "DETECT_PROBLEMS_AND_ALERT", "schedule": { diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_or_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_or_1.json index 88f59e8..77ade49 100644 --- a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_or_1.json +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_multi_tags_or_1.json @@ -1,5 +1,5 @@ { - "name": "Test Payload Daily", + "name": "Test Payload", "description": "Generating Payload for Test", "suppression": "DETECT_PROBLEMS_AND_ALERT", "schedule": { diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json index 9a8bd8b..63d1348 100644 --- a/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_daily_single_tag_1.json @@ -1,5 +1,5 @@ { - "name": "Test Payload Daily", + "name": "Test Payload", "description": "Generating Payload for Test", "suppression": "DETECT_PROBLEMS_AND_ALERT", "schedule": { diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_monthly_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_monthly_1.json new file mode 100644 index 0000000..5e0ddf6 --- /dev/null +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_monthly_1.json @@ -0,0 +1,17 @@ +{ + "name": "Test Payload", + "description": "Generating 
Payload for Test", + "suppression": "DETECT_PROBLEMS_AND_ALERT", + "schedule": { + "recurrenceType": "MONTHLY", + "start": "2020-01-01 00:00", + "end": "2020-01-02 00:00", + "zoneId": "America/Chicago", + "recurrence": { + "startTime": "23:00", + "durationMinutes": 60, + "dayOfMonth": 1 + } + }, + "type": "PLANNED" +} \ No newline at end of file diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_once_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_once_1.json new file mode 100644 index 0000000..b849cf5 --- /dev/null +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_once_1.json @@ -0,0 +1,12 @@ +{ + "name": "Test Payload", + "description": "Generating Payload for Test", + "suppression": "DETECT_PROBLEMS_AND_ALERT", + "schedule": { + "recurrenceType": "ONCE", + "start": "2020-01-01 00:00", + "end": "2020-01-02 00:00", + "zoneId": "America/Chicago" + }, + "type": "PLANNED" +} \ No newline at end of file diff --git a/tests/mockserver_payloads/requests/maintenance/mock_create_weekly_1.json b/tests/mockserver_payloads/requests/maintenance/mock_create_weekly_1.json new file mode 100644 index 0000000..2639fa9 --- /dev/null +++ b/tests/mockserver_payloads/requests/maintenance/mock_create_weekly_1.json @@ -0,0 +1,17 @@ +{ + "name": "Test Payload", + "description": "Generating Payload for Test", + "suppression": "DETECT_PROBLEMS_AND_ALERT", + "schedule": { + "recurrenceType": "WEEKLY", + "start": "2020-01-01 00:00", + "end": "2020-01-02 00:00", + "zoneId": "America/Chicago", + "recurrence": { + "startTime": "23:00", + "durationMinutes": 60, + "dayOfWeek": "SUNDAY" + } + }, + "type": "PLANNED" +} \ No newline at end of file diff --git a/tests/mockserver_payloads/responses/maintenance/mock_create_daily_1.json b/tests/mockserver_payloads/responses/maintenance/mock_create_1.json similarity index 75% rename from tests/mockserver_payloads/responses/maintenance/mock_create_daily_1.json rename to tests/mockserver_payloads/responses/maintenance/mock_create_1.json index 36d21d7..de86d44 100644 --- a/tests/mockserver_payloads/responses/maintenance/mock_create_daily_1.json +++ b/tests/mockserver_payloads/responses/maintenance/mock_create_1.json @@ -1,5 +1,5 @@ { "id": "1a000000-200a-3000-4000-5abc00000000", - "name": "Test Payload Daily", + "name": "Test Payload", "description": "Generating Payload for Test" } \ No newline at end of file diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index a736ab9..b2a9bef 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -20,7 +20,7 @@ def test_create_daily_no_scope(self): Testing create daily Maintenance Window with no scope """ mockserver_request_file = f"{self.REQUEST_DIR}mock_create_daily_1.json" - mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_daily_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_1.json" tooling_for_test.create_mockserver_expectation( CLUSTER, TENANT, @@ -37,7 +37,7 @@ def test_create_daily_no_scope(self): "2020-01-02 00:00" ) maintenance_json = maintenance.generate_window_json( - "Test Payload Daily", + "Test Payload", "Generating Payload for Test", "DETECT_PROBLEMS_AND_ALERT", maintenance_schedule, @@ -50,7 +50,7 @@ def test_create_daily_no_scope(self): def test_create_daily_single_tag(self): """Testing create daily Maintenance Window with a single tag scope""" mockserver_request_file = f"{self.REQUEST_DIR}mock_create_daily_single_tag_1.json" - mockserver_response_file = 
f"{self.RESPONSE_DIR}mock_create_daily_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_1.json" tooling_for_test.create_mockserver_expectation( CLUSTER, TENANT, @@ -69,7 +69,7 @@ def test_create_daily_single_tag(self): maintenance_scope = maintenance.generate_scope( tags=[{'context': "CONTEXTLESS", 'key': "testing"}]) maintenance_json = maintenance.generate_window_json( - "Test Payload Daily", + "Test Payload", "Generating Payload for Test", "DETECT_PROBLEMS_AND_ALERT", maintenance_schedule, @@ -83,7 +83,7 @@ def test_create_daily_single_tag(self): def test_create_daily_tags_and(self): """Testing Payloads with multiple tags in an \"AND\" configuration""" mockserver_request_file = f"{self.REQUEST_DIR}mock_create_daily_multi_tags_and_1.json" - mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_daily_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_1.json" tooling_for_test.create_mockserver_expectation( CLUSTER, @@ -108,7 +108,7 @@ def test_create_daily_tags_and(self): match_any_tag=False ) maintenance_json = maintenance.generate_window_json( - "Test Payload Daily", + "Test Payload", "Generating Payload for Test", "DETECT_PROBLEMS_AND_ALERT", maintenance_schedule, @@ -122,7 +122,7 @@ def test_create_daily_tags_and(self): def test_create_daily_tags_or(self): """Testing Payloads with multiple tags in an \"AND\" configuration""" mockserver_request_file = f"{self.REQUEST_DIR}mock_create_daily_multi_tags_or_1.json" - mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_daily_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_1.json" tooling_for_test.create_mockserver_expectation( CLUSTER, @@ -147,7 +147,7 @@ def test_create_daily_tags_or(self): match_any_tag=True ) maintenance_json = maintenance.generate_window_json( - "Test Payload Daily", + "Test Payload", "Generating Payload for Test", "DETECT_PROBLEMS_AND_ALERT", maintenance_schedule, @@ -158,6 +158,104 @@ def test_create_daily_tags_or(self): self.assertEqual(result, tooling_for_test.expected_payload( mockserver_response_file)) + def test_create_once_no_scope(self): + """Testing Payloads with ONCE recurrance type""" + mockserver_request_file = f"{self.REQUEST_DIR}mock_create_once_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_1.json" + + tooling_for_test.create_mockserver_expectation( + CLUSTER, + TENANT, + URL_PATH, + "POST", + request_file=mockserver_request_file, + response_file=mockserver_response_file, + ) + maintenance_schedule = maintenance.generate_schedule( + maintenance.RecurrenceType.ONCE, + #TODO Remove need for these variables. 
ONCE does not use them + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00" + ) + maintenance_json = maintenance.generate_window_json( + "Test Payload", + "Generating Payload for Test", + "DETECT_PROBLEMS_AND_ALERT", + maintenance_schedule, + is_planned=True + ) + result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) + self.assertEqual(result, tooling_for_test.expected_payload( + mockserver_response_file)) + + def test_create_weekly_no_scope(self): + """Testing Payloads with WEEKLY recurrance type""" + mockserver_request_file = f"{self.REQUEST_DIR}mock_create_weekly_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_1.json" + + tooling_for_test.create_mockserver_expectation( + CLUSTER, + TENANT, + URL_PATH, + "POST", + request_file=mockserver_request_file, + response_file=mockserver_response_file, + ) + maintenance_schedule = maintenance.generate_schedule( + maintenance.RecurrenceType.WEEKLY, + #TODO Remove need for these variables. ONCE does not use them + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + day=maintenance.DayOfWeek.SUNDAY + ) + maintenance_json = maintenance.generate_window_json( + "Test Payload", + "Generating Payload for Test", + "DETECT_PROBLEMS_AND_ALERT", + maintenance_schedule, + is_planned=True + ) + result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) + self.assertEqual(result, tooling_for_test.expected_payload( + mockserver_response_file)) + + def test_create_monthly_no_scope(self): + """Testing Payloads with WEEKLY recurrance type""" + mockserver_request_file = f"{self.REQUEST_DIR}mock_create_weekly_1.json" + mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_1.json" + + tooling_for_test.create_mockserver_expectation( + CLUSTER, + TENANT, + URL_PATH, + "POST", + request_file=mockserver_request_file, + response_file=mockserver_response_file, + ) + maintenance_schedule = maintenance.generate_schedule( + maintenance.RecurrenceType.MONTHLY, + #TODO Remove need for these variables. 
ONCE does not use them + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + day=1 + ) + maintenance_json = maintenance.generate_window_json( + "Test Payload", + "Generating Payload for Test", + "DETECT_PROBLEMS_AND_ALERT", + maintenance_schedule, + is_planned=True + ) + result = maintenance.create_window(CLUSTER, TENANT, maintenance_json) + self.assertEqual(result, tooling_for_test.expected_payload( + mockserver_response_file)) + class TestEnumTypes(unittest.TestCase): def test_suppression_enum_str(self): suppression = maintenance.Suppression(maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT) @@ -204,8 +302,6 @@ def test_filter_type_enum_repr(self): unittest.main() # CREATE TESTS LEFT: -# ONCE TEST -# WEEKLY TEST # MONTHLY TEST # Single Entity From a430284e7987cac693931b5dd7a1790453a94d18 Mon Sep 17 00:00:00 2001 From: Aaron Date: Sun, 9 Aug 2020 22:51:01 -0500 Subject: [PATCH 67/79] PAF-21 #Monthly now calls correct file --- tests/test_maintenance_windows.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index b2a9bef..278afba 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -224,8 +224,8 @@ def test_create_weekly_no_scope(self): mockserver_response_file)) def test_create_monthly_no_scope(self): - """Testing Payloads with WEEKLY recurrance type""" - mockserver_request_file = f"{self.REQUEST_DIR}mock_create_weekly_1.json" + """Testing Payloads with MONTHLY recurrance type""" + mockserver_request_file = f"{self.REQUEST_DIR}mock_create_monthly_1.json" mockserver_response_file = f"{self.RESPONSE_DIR}mock_create_1.json" tooling_for_test.create_mockserver_expectation( From 6124202bb5808f891e1f1947a9039d11d5957d3e Mon Sep 17 00:00:00 2001 From: Aaron Date: Sun, 9 Aug 2020 23:10:47 -0500 Subject: [PATCH 68/79] PAF-21 #Tag Parsing Logic --- tests/test_maintenance_windows.py | 52 +++++++++++++++++++++++++++++-- 1 file changed, 50 insertions(+), 2 deletions(-) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index 278afba..e9c44ea 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -298,11 +298,59 @@ def test_filter_type_enum_repr(self): self.assertIsInstance(maintenance.FilterType.__repr__(suppression), str) +class TestTagParsing(unittest.TestCase): + def test_tag_variations(self): + """Testing various ways tags need to be parsed""" + # Test 1 - Key + # Test 2 - Key, Value + # Test 3 - Context, Key and Value + # Test 4 - Key with Colon, Value + # Test 5 - Key with Colon, Value Blank + # Test 6 - Context, Key with Colon and Value + # Test 7 - Context, Key + # Test 8 - Context, Key with square brackets + # Test 9 - Context, Key with colon and squares + # Test 10 - Empty Context with squares + + test_tag_list = [ + "Key", + "Key:Value", + "[Context]Key:Value", + "Key:withColon:Value", + "Key:withColon:", + "[Context]Key:withColon:Value", + "[Context]Key", + "[Context][KeywithSquares]", + "[Context][KeyWithSquares]:AndColons:Value", + "[][KeywithSquares]", + ] + + test_tag_expected_results = [ + {'context': 'CONTEXTLESS', 'key': 'Key'}, + {'context': 'CONTEXTLESS', 'key': 'Key:Value'}, + {'context': 'Context', 'key': 'Key:Value'}, + {'context': 'CONTEXTLESS', 'key': 'Key:withColon:Value'}, + {'context': 'CONTEXTLESS', 'key': 'Key:withColon:'}, + {'context': 'Context', 'key': 'Key:withColon:Value'}, + {'context': 'Context', 'key': 'Key'}, + {'context': 'Context', 'key': '[KeywithSquares]'}, 
+ {'context': 'Context', 'key': '[KeyWithSquares]:AndColons:Value'}, + {'context': 'CONTEXTLESS', 'key': '[][KeywithSquares]'}, + ] + + all_tests_passed = True + for i in range(0, len(test_tag_list)): + processed_tag = test_tag_list[i] + self.assertTrue( + (result := maintenance.parse_tag(processed_tag)) == test_tag_expected_results[i], + f"Test {i}: {result} did not match {test_tag_expected_results[i]}") + if __name__ == '__main__': unittest.main() # CREATE TESTS LEFT: -# MONTHLY TEST +# INCORRECT DAY OF WEEK +# INCORRECT DAY OF MONTH # Single Entity # Multi Entity @@ -326,4 +374,4 @@ def test_filter_type_enum_repr(self): # GET ALL WINDOWS # GET DETAILS OF WINDOW # DELETE WINDOW -# UPDATE WINDOW +# UPDATE WINDOW \ No newline at end of file From d808ddba5f1c37908cbaa7d4100c5be160d8dc4c Mon Sep 17 00:00:00 2001 From: Aaron Date: Sun, 9 Aug 2020 23:39:58 -0500 Subject: [PATCH 69/79] PAF-21 #Adding some exception test cases --- tests/test_maintenance_windows.py | 87 +++++++++++++++++++++++++++---- 1 file changed, 77 insertions(+), 10 deletions(-) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index e9c44ea..b394275 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -256,7 +256,83 @@ def test_create_monthly_no_scope(self): self.assertEqual(result, tooling_for_test.expected_payload( mockserver_response_file)) -class TestEnumTypes(unittest.TestCase): +class TestMaintenanceExceptions(unittest.TestCase): + def test_invalid_recurrence_type(self): + """Testing exception thrown for invalid recurrence type""" + with self.assertRaises(ValueError) as context: + maintenance_schedule = maintenance.generate_schedule( + "HOURLY", + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + ) + self.assertTrue("Invalid Recurrence Type!" in str(context.exception)) + def test_invalid_day_of_week(self): + """Testing exception thrown for invalid dayOfWeek""" + with self.assertRaises(ValueError) as context: + maintenance_schedule = maintenance.generate_schedule( + maintenance.RecurrenceType.WEEKLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + day=1 + ) + self.assertTrue("Invalid Weekly Day!" in str(context.exception)) + + def test_invalid_day_of_month_value(self): + """Testing exception thrown for invalid dayOfMonth for incorrect int""" + with self.assertRaises(ValueError) as context: + maintenance_schedule = maintenance.generate_schedule( + maintenance.RecurrenceType.MONTHLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + day=32 + ) + self.assertTrue("Invalid Monthly Day!" in str(context.exception)) + + def test_invalid_day_of_month_type(self): + """Testing exception thrown for invalid dayOfMonth for a non-int""" + with self.assertRaises(TypeError) as context: + maintenance_schedule = maintenance.generate_schedule( + maintenance.RecurrenceType.MONTHLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + day="Eleven" + ) + self.assertTrue("Invalid type for Day of Month! Int between 1-31 required" in str(context.exception)) + + def test_no_day_of_week_supplied(self): + """Weekly Maintenance Window with no dayOfWeek supplied""" + with self.assertRaises(Exception) as context: + maintenance_schedule = maintenance.generate_schedule( + maintenance.RecurrenceType.WEEKLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + ) + self.assertTrue("Invalid Weekly Day!" 
in str(context.exception)) + + def test_no_day_of_week_supplied(self): + """Monthly Maintenance Window with no dayOfMonth supplied""" + with self.assertRaises(Exception) as context: + maintenance_schedule = maintenance.generate_schedule( + maintenance.RecurrenceType.MONTHLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + ) + self.assertTrue("Invalid type for Day of Month!" in str(context.exception)) + + +class TestMaintenanceEnumTypes(unittest.TestCase): def test_suppression_enum_str(self): suppression = maintenance.Suppression(maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT) self.assertIsInstance(maintenance.Suppression.__str__(suppression), str) @@ -349,9 +425,6 @@ def test_tag_variations(self): unittest.main() # CREATE TESTS LEFT: -# INCORRECT DAY OF WEEK -# INCORRECT DAY OF MONTH - # Single Entity # Multi Entity # Single Tag with Filter Type @@ -360,12 +433,6 @@ def test_tag_variations(self): # Multi Tags with Management Zone # EXCEPTION TEST CASES: -# INVALID RECURRENCE -# INVALID WEEK DAY -# INVALID MONTH DAY -# WEEK DAY NOT SUPPLIED -# MONTH DAY NOT SUPPLIED -# MONTHLY DAY OUT OF SCOPE (31 in 30 day month) # INVALID FILTER_TYPE # MANAGEMENT_ZONE WITHOUT TAG # FILTER_TYPE WITHOUT TAG From ea683ef99e7d41d7e9e9239334ba12a9896b707b Mon Sep 17 00:00:00 2001 From: Aaron Date: Sun, 9 Aug 2020 23:42:01 -0500 Subject: [PATCH 70/79] PAF-21 #Fixed a Day of Month test function name --- tests/test_maintenance_windows.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index b394275..46ef714 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -319,7 +319,7 @@ def test_no_day_of_week_supplied(self): ) self.assertTrue("Invalid Weekly Day!" in str(context.exception)) - def test_no_day_of_week_supplied(self): + def test_no_day_of_month_supplied(self): """Monthly Maintenance Window with no dayOfMonth supplied""" with self.assertRaises(Exception) as context: maintenance_schedule = maintenance.generate_schedule( From e89511ccbbfcec5f623b0fcc8a2ebea2536e1d94 Mon Sep 17 00:00:00 2001 From: Aaron Date: Sun, 9 Aug 2020 23:45:09 -0500 Subject: [PATCH 71/79] PAF-21 #Forgot to add the adjustments to real file --- dynatrace/tenant/maintenance.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dynatrace/tenant/maintenance.py b/dynatrace/tenant/maintenance.py index fb58b88..4b0a9a1 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -319,6 +319,8 @@ def generate_schedule(recurrence_type, start_time, duration, range_start, range_ # Check Monthly Day if recurrence_type == "MONTHLY": + if not isinstance(day, int): + raise TypeError("Invalid type for Day of Month! 
Int between 1-31 required") if (1 <= int(day) <= 31): schedule['recurrence']['dayOfMonth'] = day else: From 0fcd654fc6ff911fd63545791c15d1cdb9229130 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 10 Aug 2020 00:12:15 -0500 Subject: [PATCH 72/79] PAF-21 #Adding Exception cases and linting --- tests/test_maintenance_windows.py | 89 +++++++++++++++++++------------ 1 file changed, 55 insertions(+), 34 deletions(-) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index 46ef714..9d66a21 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -4,6 +4,7 @@ from tests import tooling_for_test from dynatrace.tenant import maintenance from dynatrace.requests.request_handler import TenantAPIs +from dynatrace.exceptions import InvalidDateFormatException CLUSTER = user_variables.FULL_SET["mockserver1"] TENANT = "tenant1" @@ -261,23 +262,23 @@ def test_invalid_recurrence_type(self): """Testing exception thrown for invalid recurrence type""" with self.assertRaises(ValueError) as context: maintenance_schedule = maintenance.generate_schedule( - "HOURLY", - "23:00", - 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + "HOURLY", + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", ) self.assertTrue("Invalid Recurrence Type!" in str(context.exception)) def test_invalid_day_of_week(self): """Testing exception thrown for invalid dayOfWeek""" with self.assertRaises(ValueError) as context: maintenance_schedule = maintenance.generate_schedule( - maintenance.RecurrenceType.WEEKLY, - "23:00", - 60, - "2020-01-01 00:00", - "2020-01-02 00:00", - day=1 + maintenance.RecurrenceType.WEEKLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + day=1 ) self.assertTrue("Invalid Weekly Day!" in str(context.exception)) @@ -285,12 +286,12 @@ def test_invalid_day_of_month_value(self): """Testing exception thrown for invalid dayOfMonth for incorrect int""" with self.assertRaises(ValueError) as context: maintenance_schedule = maintenance.generate_schedule( - maintenance.RecurrenceType.MONTHLY, - "23:00", - 60, - "2020-01-01 00:00", - "2020-01-02 00:00", - day=32 + maintenance.RecurrenceType.MONTHLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + day=32 ) self.assertTrue("Invalid Monthly Day!" in str(context.exception)) @@ -298,12 +299,12 @@ def test_invalid_day_of_month_type(self): """Testing exception thrown for invalid dayOfMonth for a non-int""" with self.assertRaises(TypeError) as context: maintenance_schedule = maintenance.generate_schedule( - maintenance.RecurrenceType.MONTHLY, - "23:00", - 60, - "2020-01-01 00:00", - "2020-01-02 00:00", - day="Eleven" + maintenance.RecurrenceType.MONTHLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", + day="Eleven" ) self.assertTrue("Invalid type for Day of Month! Int between 1-31 required" in str(context.exception)) @@ -311,11 +312,11 @@ def test_no_day_of_week_supplied(self): """Weekly Maintenance Window with no dayOfWeek supplied""" with self.assertRaises(Exception) as context: maintenance_schedule = maintenance.generate_schedule( - maintenance.RecurrenceType.WEEKLY, - "23:00", - 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + maintenance.RecurrenceType.WEEKLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", ) self.assertTrue("Invalid Weekly Day!" 
in str(context.exception)) @@ -323,14 +324,35 @@ def test_no_day_of_month_supplied(self): """Monthly Maintenance Window with no dayOfMonth supplied""" with self.assertRaises(Exception) as context: maintenance_schedule = maintenance.generate_schedule( - maintenance.RecurrenceType.MONTHLY, - "23:00", - 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + maintenance.RecurrenceType.MONTHLY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02 00:00", ) self.assertTrue("Invalid type for Day of Month!" in str(context.exception)) + def test_invalid_datetime_format(self): + """Test invalid datetime supplied to trigger ValueError""" + #TODO Fix Exceoption to have a message as first arg + with self.assertRaises(InvalidDateFormatException) as context: + maintenance_schedule = maintenance.generate_schedule( + maintenance.RecurrenceType.DAILY, + "23:00", + 60, + "2020-01-01 00:00", + "2020-01-02" + ) + self.assertTrue("%Y-%m-%d %H:%M" in (msg := str(context.exception)), msg) + def test_invalid_filter_type(self): + """Invalid Filter_Type""" + with self.assertRaises(ValueError) as context: + maintenance_scope = maintenance.generate_scope( + tags=[{'context': "CONTEXTLESS", 'key': "testing"}], + filter_type="INVALID_TYPE" + ) + self.assertTrue("Invalid Filter Type" in (msg := str(context.exception)), msg) + class TestMaintenanceEnumTypes(unittest.TestCase): def test_suppression_enum_str(self): @@ -433,7 +455,6 @@ def test_tag_variations(self): # Multi Tags with Management Zone # EXCEPTION TEST CASES: -# INVALID FILTER_TYPE # MANAGEMENT_ZONE WITHOUT TAG # FILTER_TYPE WITHOUT TAG From 887b42964f2616924d5a46b08bcd9ff5d6a7d7a5 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 10 Aug 2020 00:36:17 -0500 Subject: [PATCH 73/79] PAF-21 #Desmelling. Fixing Suppress to allow enum --- dynatrace/tenant/maintenance.py | 3 +- tests/test_maintenance_windows.py | 125 +++++++++++++++--------------- 2 files changed, 65 insertions(+), 63 deletions(-) diff --git a/dynatrace/tenant/maintenance.py b/dynatrace/tenant/maintenance.py index 4b0a9a1..c1aef4e 100644 --- a/dynatrace/tenant/maintenance.py +++ b/dynatrace/tenant/maintenance.py @@ -229,7 +229,6 @@ def generate_scope(entities=None, tags=None, filter_type=None, management_zone_i if entities is None: entities = [] matches = [] - matches_payload = {} if match_any_tag and isinstance(tags, list) and len(tags) > 1: for tag in tags: @@ -261,7 +260,7 @@ def generate_window_json(name, description, suppression, schedule, scope=None, i window_json = { "name": name, "description": description, - "suppression": suppression, + "suppression": str(suppression), "schedule": schedule } window_json['type'] = "PLANNED" if is_planned else "UNPLANNED" diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index 9d66a21..e723c38 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -9,6 +9,10 @@ CLUSTER = user_variables.FULL_SET["mockserver1"] TENANT = "tenant1" URL_PATH = TenantAPIs.MAINTENANCE_WINDOWS +TEST_RANGE_START = "2020-01-01 00:00" +TEST_RANGE_END = "2020-01-02 00:00" +TEST_PAYLOAD_TITLE = "Test Payload" +TEST_PAYLOAD_DESC = "Generating Payload for Test" class TestMaintenanceWindowCreate(unittest.TestCase): @@ -31,16 +35,16 @@ def test_create_daily_no_scope(self): response_file=mockserver_response_file, ) maintenance_schedule = maintenance.generate_schedule( - "DAILY", + maintenance.RecurrenceType.DAILY, "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00" + TEST_RANGE_START, + TEST_RANGE_END ) maintenance_json = 
maintenance.generate_window_json( - "Test Payload", - "Generating Payload for Test", - "DETECT_PROBLEMS_AND_ALERT", + TEST_PAYLOAD_TITLE, + TEST_PAYLOAD_DESC, + maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT, maintenance_schedule, is_planned=True ) @@ -61,18 +65,18 @@ def test_create_daily_single_tag(self): response_file=mockserver_response_file, ) maintenance_schedule = maintenance.generate_schedule( - "DAILY", + maintenance.RecurrenceType.DAILY, "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00" + TEST_RANGE_START, + TEST_RANGE_END ) maintenance_scope = maintenance.generate_scope( tags=[{'context': "CONTEXTLESS", 'key': "testing"}]) maintenance_json = maintenance.generate_window_json( - "Test Payload", - "Generating Payload for Test", - "DETECT_PROBLEMS_AND_ALERT", + TEST_PAYLOAD_TITLE, + TEST_PAYLOAD_DESC, + maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT, maintenance_schedule, scope=maintenance_scope, is_planned=True @@ -95,11 +99,11 @@ def test_create_daily_tags_and(self): response_file=mockserver_response_file, ) maintenance_schedule = maintenance.generate_schedule( - "DAILY", + maintenance.RecurrenceType.DAILY, "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00" + TEST_RANGE_START, + TEST_RANGE_END ) maintenance_scope = maintenance.generate_scope( tags=[ @@ -109,9 +113,9 @@ def test_create_daily_tags_and(self): match_any_tag=False ) maintenance_json = maintenance.generate_window_json( - "Test Payload", - "Generating Payload for Test", - "DETECT_PROBLEMS_AND_ALERT", + TEST_PAYLOAD_TITLE, + TEST_PAYLOAD_DESC, + maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT, maintenance_schedule, scope=maintenance_scope, is_planned=True @@ -134,11 +138,11 @@ def test_create_daily_tags_or(self): response_file=mockserver_response_file, ) maintenance_schedule = maintenance.generate_schedule( - "DAILY", + maintenance.RecurrenceType.DAILY, "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00" + TEST_RANGE_START, + TEST_RANGE_END ) maintenance_scope = maintenance.generate_scope( tags=[ @@ -148,9 +152,9 @@ def test_create_daily_tags_or(self): match_any_tag=True ) maintenance_json = maintenance.generate_window_json( - "Test Payload", - "Generating Payload for Test", - "DETECT_PROBLEMS_AND_ALERT", + TEST_PAYLOAD_TITLE, + TEST_PAYLOAD_DESC, + maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT, maintenance_schedule, scope=maintenance_scope, is_planned=True @@ -177,13 +181,13 @@ def test_create_once_no_scope(self): #TODO Remove need for these variables. ONCE does not use them "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00" + TEST_RANGE_START, + TEST_RANGE_END ) maintenance_json = maintenance.generate_window_json( - "Test Payload", - "Generating Payload for Test", - "DETECT_PROBLEMS_AND_ALERT", + TEST_PAYLOAD_TITLE, + TEST_PAYLOAD_DESC, + maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT, maintenance_schedule, is_planned=True ) @@ -209,14 +213,14 @@ def test_create_weekly_no_scope(self): #TODO Remove need for these variables. ONCE does not use them "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + TEST_RANGE_START, + TEST_RANGE_END, day=maintenance.DayOfWeek.SUNDAY ) maintenance_json = maintenance.generate_window_json( - "Test Payload", - "Generating Payload for Test", - "DETECT_PROBLEMS_AND_ALERT", + TEST_PAYLOAD_TITLE, + TEST_PAYLOAD_DESC, + maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT, maintenance_schedule, is_planned=True ) @@ -242,14 +246,14 @@ def test_create_monthly_no_scope(self): #TODO Remove need for these variables. 
ONCE does not use them "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + TEST_RANGE_START, + TEST_RANGE_END, day=1 ) maintenance_json = maintenance.generate_window_json( - "Test Payload", - "Generating Payload for Test", - "DETECT_PROBLEMS_AND_ALERT", + TEST_PAYLOAD_TITLE, + TEST_PAYLOAD_DESC, + maintenance.Suppression.DETECT_PROBLEMS_AND_ALERT, maintenance_schedule, is_planned=True ) @@ -261,23 +265,23 @@ class TestMaintenanceExceptions(unittest.TestCase): def test_invalid_recurrence_type(self): """Testing exception thrown for invalid recurrence type""" with self.assertRaises(ValueError) as context: - maintenance_schedule = maintenance.generate_schedule( + maintenance.generate_schedule( "HOURLY", "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + TEST_RANGE_START, + TEST_RANGE_END, ) self.assertTrue("Invalid Recurrence Type!" in str(context.exception)) def test_invalid_day_of_week(self): """Testing exception thrown for invalid dayOfWeek""" with self.assertRaises(ValueError) as context: - maintenance_schedule = maintenance.generate_schedule( + maintenance.generate_schedule( maintenance.RecurrenceType.WEEKLY, "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + TEST_RANGE_START, + TEST_RANGE_END, day=1 ) self.assertTrue("Invalid Weekly Day!" in str(context.exception)) @@ -285,12 +289,12 @@ def test_invalid_day_of_week(self): def test_invalid_day_of_month_value(self): """Testing exception thrown for invalid dayOfMonth for incorrect int""" with self.assertRaises(ValueError) as context: - maintenance_schedule = maintenance.generate_schedule( + maintenance.generate_schedule( maintenance.RecurrenceType.MONTHLY, "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + TEST_RANGE_START, + TEST_RANGE_END, day=32 ) self.assertTrue("Invalid Monthly Day!" in str(context.exception)) @@ -298,12 +302,12 @@ def test_invalid_day_of_month_value(self): def test_invalid_day_of_month_type(self): """Testing exception thrown for invalid dayOfMonth for a non-int""" with self.assertRaises(TypeError) as context: - maintenance_schedule = maintenance.generate_schedule( + maintenance.generate_schedule( maintenance.RecurrenceType.MONTHLY, "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + TEST_RANGE_START, + TEST_RANGE_END, day="Eleven" ) self.assertTrue("Invalid type for Day of Month! Int between 1-31 required" in str(context.exception)) @@ -311,24 +315,24 @@ def test_invalid_day_of_month_type(self): def test_no_day_of_week_supplied(self): """Weekly Maintenance Window with no dayOfWeek supplied""" with self.assertRaises(Exception) as context: - maintenance_schedule = maintenance.generate_schedule( + maintenance.generate_schedule( maintenance.RecurrenceType.WEEKLY, "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + TEST_RANGE_START, + TEST_RANGE_END, ) self.assertTrue("Invalid Weekly Day!" in str(context.exception)) def test_no_day_of_month_supplied(self): """Monthly Maintenance Window with no dayOfMonth supplied""" with self.assertRaises(Exception) as context: - maintenance_schedule = maintenance.generate_schedule( + maintenance.generate_schedule( maintenance.RecurrenceType.MONTHLY, "23:00", 60, - "2020-01-01 00:00", - "2020-01-02 00:00", + TEST_RANGE_START, + TEST_RANGE_END, ) self.assertTrue("Invalid type for Day of Month!" 
in str(context.exception)) @@ -336,18 +340,18 @@ def test_invalid_datetime_format(self): """Test invalid datetime supplied to trigger ValueError""" #TODO Fix Exceoption to have a message as first arg with self.assertRaises(InvalidDateFormatException) as context: - maintenance_schedule = maintenance.generate_schedule( + maintenance.generate_schedule( maintenance.RecurrenceType.DAILY, "23:00", 60, - "2020-01-01 00:00", + TEST_RANGE_START, "2020-01-02" ) self.assertTrue("%Y-%m-%d %H:%M" in (msg := str(context.exception)), msg) def test_invalid_filter_type(self): """Invalid Filter_Type""" with self.assertRaises(ValueError) as context: - maintenance_scope = maintenance.generate_scope( + maintenance.generate_scope( tags=[{'context': "CONTEXTLESS", 'key': "testing"}], filter_type="INVALID_TYPE" ) @@ -436,7 +440,6 @@ def test_tag_variations(self): {'context': 'CONTEXTLESS', 'key': '[][KeywithSquares]'}, ] - all_tests_passed = True for i in range(0, len(test_tag_list)): processed_tag = test_tag_list[i] self.assertTrue( From 92b05d236ed80c96e44f6ba4289007585d29816c Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 10 Aug 2020 07:49:34 -0500 Subject: [PATCH 74/79] PAF-21 #Fixed date assert to look at message --- tests/test_maintenance_windows.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index e723c38..d791a3a 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -347,7 +347,7 @@ def test_invalid_datetime_format(self): TEST_RANGE_START, "2020-01-02" ) - self.assertTrue("%Y-%m-%d %H:%M" in (msg := str(context.exception)), msg) + self.assertTrue("Incorrect Date " in context.exception.message, context.exception.message) def test_invalid_filter_type(self): """Invalid Filter_Type""" with self.assertRaises(ValueError) as context: From 27c3770aa1de70808ed40844ef06afa79f94af03 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 10 Aug 2020 07:51:35 -0500 Subject: [PATCH 75/79] Adjusting exception to use message --- dynatrace/exceptions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dynatrace/exceptions.py b/dynatrace/exceptions.py index b92a074..3af3679 100644 --- a/dynatrace/exceptions.py +++ b/dynatrace/exceptions.py @@ -9,8 +9,8 @@ def __init__ (self, message): class InvalidDateFormatException(ValueError): def __init__(self, required_format): - self.required_format = required_format - print("Incorrect Date for following entry: %s", required_format, file=stderr) + self.message = f"Incorrect Date for following entry: {required_format}" + class InvalidScopeException(ValueError): def __init__(self, required_format): From 646066b32b2d360b0c875d2164a4558267367ec5 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 10 Aug 2020 09:42:04 -0500 Subject: [PATCH 76/79] Fixed All Test Cases minus Maintenance --- tests/test_host_groups.py | 1 - tests/test_process_groups.py | 2 +- tests/test_services.py | 2 +- tests/test_topology_hosts.py | 40 ++++++++++++++++++------------------ tests/tooling_for_test.py | 7 ++++--- 5 files changed, 26 insertions(+), 26 deletions(-) diff --git a/tests/test_host_groups.py b/tests/test_host_groups.py index 48fda87..2ce311a 100644 --- a/tests/test_host_groups.py +++ b/tests/test_host_groups.py @@ -16,7 +16,6 @@ def test_get_host_groups_tenantwide(self): parameters = { "relativeTime": ["day"], "includeDetails": ["true"], - "Api-Token": [CLUSTER["api_token"][TENANT]], } mockserver_response_file = f"{self.RESPONSE_DIR}mock_get_general_1.json" 
tooling_for_test.create_mockserver_expectation( diff --git a/tests/test_process_groups.py b/tests/test_process_groups.py index bf08efe..a6fe496 100644 --- a/tests/test_process_groups.py +++ b/tests/test_process_groups.py @@ -61,7 +61,7 @@ def test_get_pg_count(self): result = process_groups.get_process_group_count_tenantwide(cluster, tenant) - self.assertEqual(result, 4) + self.assertEqual(result, 3) class TestPGTags(unittest.TestCase): diff --git a/tests/test_services.py b/tests/test_services.py index 345eb6d..501fc78 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -8,7 +8,7 @@ cluster = FULL_SET.get('mockserver1') tenant = 'tenant1' -url = f"{TenantAPIs.V1_TOPOLOGY}/services" +url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/services" request_dir = "tests/mockserver_payloads/requests/services" response_dir = "tests/mockserver_payloads/responses/services" diff --git a/tests/test_topology_hosts.py b/tests/test_topology_hosts.py index 5aef7c8..dc43c6f 100644 --- a/tests/test_topology_hosts.py +++ b/tests/test_topology_hosts.py @@ -7,8 +7,8 @@ from dynatrace.requests.request_handler import TenantAPIs from dynatrace.tenant.topology import hosts -cluster = FULL_SET.get('mock_cluster') -tenant = 'mock_tenant' +CLUSTER = FULL_SET["mockserver1"] +TENANT = "tenant1" url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/hosts" request_dir = "tests/mockserver_payloads/requests/hosts" response_dir = "tests/mockserver_payloads/responses/hosts" @@ -23,14 +23,14 @@ def test_get_all_hosts(self): response_file = f"{response_dir}/get_all.json" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, + cluster=CLUSTER, + tenant=TENANT, url_path=url, request_type="GET", response_file=response_file ) - result = hosts.get_hosts_tenantwide(cluster, tenant) + result = hosts.get_hosts_tenantwide(CLUSTER, TENANT) self.assertEqual(result, testtools.expected_payload(response_file)) def test_get_single_host(self): @@ -40,14 +40,14 @@ def test_get_single_host(self): response_file = f"{response_dir}/get_single.json" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, + cluster=CLUSTER, + tenant=TENANT, url_path=f"{url}/{host_id}", request_type="GET", response_file=response_file ) - result = hosts.get_host(cluster, tenant, host_id) + result = hosts.get_host(CLUSTER, TENANT, host_id) self.assertEqual(result, testtools.expected_payload(response_file)) def test_get_host_count(self): @@ -55,8 +55,8 @@ def test_get_host_count(self): response_file = f"{response_dir}/get_all.json" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, + cluster=CLUSTER, + tenant=TENANT, url_path=url, request_type="GET", response_file=response_file, @@ -64,7 +64,7 @@ def test_get_host_count(self): includeDetails=['False']) ) - result = hosts.get_host_count_tenantwide(cluster, tenant) + result = hosts.get_host_count_tenantwide(CLUSTER, TENANT) self.assertEqual(result, 3) def test_get_host_units(self): @@ -72,14 +72,14 @@ def test_get_host_units(self): response_file = f"{response_dir}/get_all.json" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, + cluster=CLUSTER, + tenant=TENANT, url_path=url, request_type="GET", response_file=response_file ) - result = hosts.get_host_units_tenantwide(cluster, tenant) + result = hosts.get_host_units_tenantwide(CLUSTER, TENANT) self.assertEqual(result, 4) hosts.set_host_properties @@ -96,15 +96,15 @@ def test_add_tags(self): tags = ["demo", "example"] testtools.create_mockserver_expectation( - 
cluster=cluster, - tenant=tenant, + cluster=CLUSTER, + tenant=TENANT, request_type="POST", url_path=f"{url}/{host_id}", request_file=request_file, response_code=201 ) - result = hosts.add_host_tags(cluster, tenant, host_id, tags) + result = hosts.add_host_tags(CLUSTER, TENANT, host_id, tags) self.assertEqual(result, 201) def test_delete_tags(self): @@ -114,14 +114,14 @@ def test_delete_tags(self): tag = "demo" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, + cluster=CLUSTER, + tenant=TENANT, url_path=f"{url}/{host_id}/tags/{tag}", request_type="DELETE", response_code=204 ) - result = hosts.delete_host_tag(cluster, tenant, host_id, tag) + result = hosts.delete_host_tag(CLUSTER, TENANT, host_id, tag) self.assertEqual(204, result.status_code) diff --git a/tests/tooling_for_test.py b/tests/tooling_for_test.py index ccc7fb7..27e14fb 100644 --- a/tests/tooling_for_test.py +++ b/tests/tooling_for_test.py @@ -11,8 +11,8 @@ def create_mockserver_expectation(cluster, tenant, url_path, request_type, **kwa requests.packages.urllib3.disable_warnings() expectation = { "httpRequest": { - "queryStringParameters": { - "Api-Token": [cluster.get('api_token').get(tenant)] + "headers": { + "Authorization": [f"Api-Token {cluster.get('api_token').get(tenant)}"], }, "path": url_path, "method": request_type @@ -27,10 +27,11 @@ def create_mockserver_expectation(cluster, tenant, url_path, request_type, **kwa "id": "OneOff", } + logging.debug(f"URL PATH: {url_path}") logging.debug(f"KWARGS {kwargs}") # Paramaters should always at least have Api-Token if 'parameters' in kwargs: - expectation["httpRequest"]["queryStringParameters"].update(kwargs['parameters']) + expectation["httpRequest"]["queryStringParameters"] = kwargs['parameters'] if "request_file" in kwargs: with open(kwargs['request_file']) as f: From 578f0c9f583210319c2e21e6cde30575a6f79c74 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 10 Aug 2020 09:59:57 -0500 Subject: [PATCH 77/79] PAF-41 #Matching PEP8 Style for constants --- tests/test_process_groups.py | 52 ++++++++++++++++++------------------ tests/test_processes.py | 29 ++++++++++---------- tests/test_services.py | 50 +++++++++++++++++----------------- tests/test_topology_hosts.py | 28 +++++++++---------- 4 files changed, 79 insertions(+), 80 deletions(-) diff --git a/tests/test_process_groups.py b/tests/test_process_groups.py index a6fe496..5f103b2 100644 --- a/tests/test_process_groups.py +++ b/tests/test_process_groups.py @@ -6,11 +6,11 @@ from dynatrace.requests.request_handler import TenantAPIs from dynatrace.tenant.topology import process_groups -cluster = FULL_SET.get('mockserver1') -tenant = 'tenant1' -url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/process-groups" -request_dir = "tests/mockserver_payloads/requests/processes" -response_dir = "tests/mockserver_payloads/responses/processes" +CLUSTER = FULL_SET.get('mockserver1') +TENANT = 'tenant1' +URL_PATH = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/process-groups" +REQUEST_DIR = "tests/mockserver_payloads/requests/processes" +RESPONSE_DIR = "tests/mockserver_payloads/responses/processes" class TestGetPGs(unittest.TestCase): @@ -18,49 +18,49 @@ class TestGetPGs(unittest.TestCase): def test_get_all_pgs(self): """Test fetching all PGs""" - response_file = f"{response_dir}/get_all_pgs.json" + response_file = f"{RESPONSE_DIR}/get_all_pgs.json" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, - url_path=url, + cluster=CLUSTER, + tenant=TENANT, + url_path=URL_PATH, request_type="GET", 
response_file=response_file ) - result = process_groups.get_process_groups_tenantwide(cluster, tenant) + result = process_groups.get_process_groups_tenantwide(CLUSTER, TENANT) self.assertEqual(result, testtools.expected_payload(response_file)) def test_get_single_pg(self): """Test fetching single PG""" - response_file = f"{response_dir}/get_one_pg.json" + response_file = f"{RESPONSE_DIR}/get_one_pg.json" pg_id = "PROCESS_GROUP-ABC123DEF456GHI7" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, - url_path=f"{url}/{pg_id}", + cluster=CLUSTER, + tenant=TENANT, + url_path=f"{URL_PATH}/{pg_id}", request_type="GET", response_file=response_file ) - result = process_groups.get_process_group(cluster, tenant, pg_id) + result = process_groups.get_process_group(CLUSTER, TENANT, pg_id) self.assertEqual(result, testtools.expected_payload(response_file)) def test_get_pg_count(self): """Test getting the PG count tenantwide.""" - response_file = f"{response_dir}/get_all_pgs.json" + response_file = f"{RESPONSE_DIR}/get_all_pgs.json" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, - url_path=url, + cluster=CLUSTER, + tenant=TENANT, + url_path=URL_PATH, request_type="GET", response_file=response_file ) - result = process_groups.get_process_group_count_tenantwide(cluster, - tenant) + result = process_groups.get_process_group_count_tenantwide(CLUSTER, + TENANT) self.assertEqual(result, 3) @@ -70,19 +70,19 @@ class TestPGTags(unittest.TestCase): def test_add_pg_tags(self): """Test adding two tags to the PG.""" pg_id = "PROCESS_GROUP-859E1549052CD876" - request_file = f"{request_dir}/tags.json" + request_file = f"{REQUEST_DIR}/tags.json" tags = ["demo", "example"] testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, + cluster=CLUSTER, + tenant=TENANT, request_type="POST", - url_path=f"{url}/{pg_id}", + url_path=f"{URL_PATH}/{pg_id}", request_file=request_file, response_code=201 ) - result = process_groups.add_process_group_tags(cluster, tenant, + result = process_groups.add_process_group_tags(CLUSTER, TENANT, pg_id, tags) self.assertEqual(result, 201) diff --git a/tests/test_processes.py b/tests/test_processes.py index 2377c99..3fc29b1 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -6,11 +6,10 @@ from dynatrace.requests.request_handler import TenantAPIs from dynatrace.tenant.topology import process -cluster = FULL_SET.get('mockserver1') -tenant = 'tenant1' -url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/processes" -request_dir = "tests/mockserver_payloads/requests/processes" -response_dir = "tests/mockserver_payloads/responses/processes" +CLUSTER = FULL_SET.get('mockserver1') +TENANT = 'tenant1' +URL_PATH = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/processes" +RESPONSE_DIR = "tests/mockserver_payloads/responses/processes" class TestGetProcesses(unittest.TestCase): @@ -18,33 +17,33 @@ class TestGetProcesses(unittest.TestCase): def test_get_all_processes(self): """Test getting all processes tenantwide.""" - response_file = f"{response_dir}/get_all_pgis.json" + response_file = f"{RESPONSE_DIR}/get_all_pgis.json" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, - url_path=url, + cluster=CLUSTER, + tenant=TENANT, + url_path=URL_PATH, request_type="GET", response_file=response_file ) - result = process.get_processes_tenantwide(cluster, tenant) + result = process.get_processes_tenantwide(CLUSTER, TENANT) self.assertEqual(result, testtools.expected_payload(response_file)) def 
test_get_single_process(self): """Tests getting one specific process.""" - response_file = f"{response_dir}/get_one_pgi.json" + response_file = f"{RESPONSE_DIR}/get_one_pgi.json" process_id = "PROCESS_GROUP_INSTANCE-ABC123DEF456GHI7" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, - url_path=f"{url}/{process_id}", + cluster=CLUSTER, + tenant=TENANT, + url_path=f"{URL_PATH}/{process_id}", request_type="GET", response_file=response_file ) - result = process.get_process(cluster, tenant, process_id) + result = process.get_process(CLUSTER, TENANT, process_id) self.assertEqual(result, testtools.expected_payload(response_file)) diff --git a/tests/test_services.py b/tests/test_services.py index 501fc78..68e02ac 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -6,11 +6,11 @@ from dynatrace.requests.request_handler import TenantAPIs from dynatrace.tenant.topology import services -cluster = FULL_SET.get('mockserver1') -tenant = 'tenant1' -url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/services" -request_dir = "tests/mockserver_payloads/requests/services" -response_dir = "tests/mockserver_payloads/responses/services" +CLUSTER = FULL_SET.get('mockserver1') +TENANT = 'tenant1' +URL_PATH = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/services" +REQUEST_DIR = "tests/mockserver_payloads/requests/services" +RESPONSE_DIR = "tests/mockserver_payloads/responses/services" class TestGetServices(unittest.TestCase): @@ -18,48 +18,48 @@ class TestGetServices(unittest.TestCase): def test_get_all_svc(self): """Test fetching all services""" - response_file = f"{response_dir}/get_all.json" + response_file = f"{RESPONSE_DIR}/get_all.json" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, - url_path=url, + cluster=CLUSTER, + tenant=TENANT, + url_path=URL_PATH, request_type="GET", response_file=response_file ) - result = services.get_services_tenantwide(cluster, tenant) + result = services.get_services_tenantwide(CLUSTER, TENANT) self.assertEqual(result, testtools.expected_payload(response_file)) def test_get_single_svc(self): """Test fetching single service""" - response_file = f"{response_dir}/get_one.json" + response_file = f"{RESPONSE_DIR}/get_one.json" svc_id = "SERVICE-ABC123DEF456GHI7" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, - url_path=f"{url}/{svc_id}", + cluster=CLUSTER, + tenant=TENANT, + url_path=f"{URL_PATH}/{svc_id}", request_type="GET", response_file=response_file ) - result = services.get_service(cluster, tenant, svc_id) + result = services.get_service(CLUSTER, TENANT, svc_id) self.assertEqual(result, testtools.expected_payload(response_file)) def test_get_svc_count(self): """Test getting the service count tenantwide.""" - response_file = f"{response_dir}/get_all.json" + response_file = f"{RESPONSE_DIR}/get_all.json" testtools.create_mockserver_expectation( - cluster=cluster, - tenant=tenant, - url_path=url, + cluster=CLUSTER, + tenant=TENANT, + url_path=URL_PATH, request_type="GET", response_file=response_file ) - result = services.get_service_count_tenantwide(cluster, tenant) + result = services.get_service_count_tenantwide(CLUSTER, TENANT) self.assertEqual(result, 3) @@ -69,19 +69,19 @@ class TestServiceTags(unittest.TestCase): def test_add_svc_tags(self): """Test adding two tags to the service.""" svc_id = "SERVICE-ABC123DEF456GHI7" - request_file = f"{request_dir}/tags.json" + request_file = f"{REQUEST_DIR}/tags.json" tags = ["demo", "example"] testtools.create_mockserver_expectation( - 
cluster=cluster, - tenant=tenant, + cluster=CLUSTER, + tenant=TENANT, request_type="POST", - url_path=f"{url}/{svc_id}", + url_path=f"{URL_PATH}/{svc_id}", request_file=request_file, response_code=201 ) - result = services.add_service_tags(cluster, tenant, svc_id, tags) + result = services.add_service_tags(CLUSTER, TENANT, svc_id, tags) self.assertEqual(result, 201) diff --git a/tests/test_topology_hosts.py b/tests/test_topology_hosts.py index dc43c6f..ac99dc7 100644 --- a/tests/test_topology_hosts.py +++ b/tests/test_topology_hosts.py @@ -9,9 +9,9 @@ CLUSTER = FULL_SET["mockserver1"] TENANT = "tenant1" -url = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/hosts" -request_dir = "tests/mockserver_payloads/requests/hosts" -response_dir = "tests/mockserver_payloads/responses/hosts" +URL_PATH = f"{TenantAPIs.V1_TOPOLOGY}/infrastructure/hosts" +REQUEST_DIR = "tests/mockserver_payloads/requests/hosts" +RESPONSE_DIR = "tests/mockserver_payloads/responses/hosts" class TestGetHosts(unittest.TestCase): @@ -20,12 +20,12 @@ class TestGetHosts(unittest.TestCase): def test_get_all_hosts(self): """Test fetching all hosts""" - response_file = f"{response_dir}/get_all.json" + response_file = f"{RESPONSE_DIR}/get_all.json" testtools.create_mockserver_expectation( cluster=CLUSTER, tenant=TENANT, - url_path=url, + url_path=URL_PATH, request_type="GET", response_file=response_file ) @@ -37,12 +37,12 @@ def test_get_single_host(self): """Test fetching a specific host""" host_id = "HOST-9F74450267BAAE20" - response_file = f"{response_dir}/get_single.json" + response_file = f"{RESPONSE_DIR}/get_single.json" testtools.create_mockserver_expectation( cluster=CLUSTER, tenant=TENANT, - url_path=f"{url}/{host_id}", + url_path=f"{URL_PATH}/{host_id}", request_type="GET", response_file=response_file ) @@ -53,11 +53,11 @@ def test_get_single_host(self): def test_get_host_count(self): """Test getting the count of hosts in a tenant.""" - response_file = f"{response_dir}/get_all.json" + response_file = f"{RESPONSE_DIR}/get_all.json" testtools.create_mockserver_expectation( cluster=CLUSTER, tenant=TENANT, - url_path=url, + url_path=URL_PATH, request_type="GET", response_file=response_file, parameters=dict(relativeTime=['day'], @@ -70,11 +70,11 @@ def test_get_host_count(self): def test_get_host_units(self): """Tests getting the consumed host units in a tenant.""" - response_file = f"{response_dir}/get_all.json" + response_file = f"{RESPONSE_DIR}/get_all.json" testtools.create_mockserver_expectation( cluster=CLUSTER, tenant=TENANT, - url_path=url, + url_path=URL_PATH, request_type="GET", response_file=response_file ) @@ -92,14 +92,14 @@ def test_add_tags(self): """Test adding two tags to a specific host.""" host_id = "HOST-ABC123DEF456GHIJ" - request_file = f"{request_dir}/tags.json" + request_file = f"{REQUEST_DIR}/tags.json" tags = ["demo", "example"] testtools.create_mockserver_expectation( cluster=CLUSTER, tenant=TENANT, request_type="POST", - url_path=f"{url}/{host_id}", + url_path=f"{URL_PATH}/{host_id}", request_file=request_file, response_code=201 ) @@ -116,7 +116,7 @@ def test_delete_tags(self): testtools.create_mockserver_expectation( cluster=CLUSTER, tenant=TENANT, - url_path=f"{url}/{host_id}/tags/{tag}", + url_path=f"{URL_PATH}/{host_id}/tags/{tag}", request_type="DELETE", response_code=204 ) From 9532a123973f026a0996b574bd3ce62f689c5d32 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 10 Aug 2020 10:01:07 -0500 Subject: [PATCH 78/79] PAF-41 #Renaming for consistancy --- tests/{test_process_groups.py => 
test_topology_process_groups.py} | 0 tests/{test_processes.py => test_topology_processes.py} | 0 tests/{test_services.py => test_topology_services.py} | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename tests/{test_process_groups.py => test_topology_process_groups.py} (100%) rename tests/{test_processes.py => test_topology_processes.py} (100%) rename tests/{test_services.py => test_topology_services.py} (100%) diff --git a/tests/test_process_groups.py b/tests/test_topology_process_groups.py similarity index 100% rename from tests/test_process_groups.py rename to tests/test_topology_process_groups.py diff --git a/tests/test_processes.py b/tests/test_topology_processes.py similarity index 100% rename from tests/test_processes.py rename to tests/test_topology_processes.py diff --git a/tests/test_services.py b/tests/test_topology_services.py similarity index 100% rename from tests/test_services.py rename to tests/test_topology_services.py From 570d1facf04320ea594294638d0349c2a94828f7 Mon Sep 17 00:00:00 2001 From: Aaron Date: Mon, 10 Aug 2020 10:29:25 -0500 Subject: [PATCH 79/79] PAF-21 #Forcing Endpoint to String --- tests/test_maintenance_windows.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_maintenance_windows.py b/tests/test_maintenance_windows.py index d791a3a..e76709a 100644 --- a/tests/test_maintenance_windows.py +++ b/tests/test_maintenance_windows.py @@ -8,7 +8,7 @@ CLUSTER = user_variables.FULL_SET["mockserver1"] TENANT = "tenant1" -URL_PATH = TenantAPIs.MAINTENANCE_WINDOWS +URL_PATH = str(TenantAPIs.MAINTENANCE_WINDOWS) TEST_RANGE_START = "2020-01-01 00:00" TEST_RANGE_END = "2020-01-02 00:00" TEST_PAYLOAD_TITLE = "Test Payload"
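
Patches 74 and 75 above work as a pair: the date-format error text moves onto the exception object itself, and the unit test asserts against that attribute instead of scraping stderr. A minimal sketch of how the two pieces interact follows; the parse helper is hypothetical (the patches only show the exception class and the assert), and it assumes the "%Y-%m-%d %H:%M" format string is what generate_schedule validates against, as the earlier assertion suggests.

    # Sketch only: ties together the exception from patch 75 and the assert from
    # patch 74. parse_window_datetime is an illustrative stand-in, not project code.
    from datetime import datetime


    class InvalidDateFormatException(ValueError):
        def __init__(self, required_format):
            self.message = f"Incorrect Date for following entry: {required_format}"


    def parse_window_datetime(value, required_format="%Y-%m-%d %H:%M"):
        """Hypothetical helper that raises the reworked exception on bad input."""
        try:
            return datetime.strptime(value, required_format)
        except ValueError as err:
            raise InvalidDateFormatException(required_format) from err


    try:
        parse_window_datetime("2020-01-02")      # missing the HH:MM portion
    except InvalidDateFormatException as exc:
        assert "Incorrect Date " in exc.message  # mirrors the updated unittest assertion

One side effect of not forwarding the text to super().__init__() is that str(exc) comes back empty, which is presumably why the updated test reads context.exception.message rather than str(context.exception).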
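
The final patch coerces URL_PATH to a plain string before the maintenance-window tests hand it to create_mockserver_expectation. A likely reason, assuming TenantAPIs is an Enum of endpoint paths (its definition is not part of this series), is that the tooling now places url_path directly into the JSON expectation body, and a bare Enum member is not JSON serializable. A minimal sketch under that assumption:

    # Sketch only: TenantAPIs here is an illustrative stand-in for the real enum,
    # and the endpoint value is assumed, not taken from the project source.
    import json
    from enum import Enum


    class TenantAPIs(Enum):
        MAINTENANCE_WINDOWS = "/api/config/v1/maintenanceWindows"

        def __str__(self):
            return self.value


    URL_PATH = TenantAPIs.MAINTENANCE_WINDOWS
    # json.dumps({"path": URL_PATH}) raises TypeError: Object of type TenantAPIs
    # is not JSON serializable, so the expectation has to carry the string form.
    expectation = {"path": str(URL_PATH)}
    print(json.dumps(expectation))  # {"path": "/api/config/v1/maintenanceWindows"}

The topology tests never hit this because they build their paths with f-strings (f"{TenantAPIs.V1_TOPOLOGY}/..."), which already produce plain strings at construction time.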