From 4ef375d8c95272041d553c6549f7ed88c21889c1 Mon Sep 17 00:00:00 2001 From: Murilo Pereira Date: Mon, 11 Mar 2019 20:49:57 +0100 Subject: [PATCH 01/10] Upgrade Elastic base-tech to 6.6.1. --- frameworks/elastic/versions.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frameworks/elastic/versions.sh b/frameworks/elastic/versions.sh index 3fb25edccb0..d1d9a6b05e9 100755 --- a/frameworks/elastic/versions.sh +++ b/frameworks/elastic/versions.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -export TEMPLATE_ELASTIC_VERSION="6.3.2" +export TEMPLATE_ELASTIC_VERSION="6.6.1" export TEMPLATE_ELASTIC_STATSD_VERSION="${TEMPLATE_ELASTIC_VERSION}.0" export TEMPLATE_SUPPORT_DIAGNOSTICS_VERSION="6.4.4" From 407e1392d4c754aa53a2a31cb0e5ad838b3130fd Mon Sep 17 00:00:00 2001 From: Murilo Pereira Date: Mon, 25 Mar 2019 22:44:28 +0100 Subject: [PATCH 02/10] Run TLS tests with a trial license enabled. And healthcheck credentials configured. --- frameworks/elastic/tests/config.py | 25 +++++++++++++++---- frameworks/elastic/tests/test_tls.py | 36 ++++++++++++++++++++++------ 2 files changed, 49 insertions(+), 12 deletions(-) diff --git a/frameworks/elastic/tests/config.py b/frameworks/elastic/tests/config.py index b5ae7e45630..c2700a65aba 100644 --- a/frameworks/elastic/tests/config.py +++ b/frameworks/elastic/tests/config.py @@ -104,7 +104,9 @@ def check_elasticsearch_index_health( def check_custom_elasticsearch_cluster_setting( service_name=SERVICE_NAME, setting_path=None, expected_value=None ): - settings = _curl_query(service_name, "GET", "_cluster/settings?include_defaults=true")["defaults"] + settings = _curl_query(service_name, "GET", "_cluster/settings?include_defaults=true")[ + "defaults" + ] if not settings: return False actual_value = get_in(setting_path, settings) @@ -289,15 +291,26 @@ def verify_xpack_license( @retrying.retry( wait_fixed=1000, stop_max_delay=5 * 1000, retry_on_result=lambda return_value: not return_value ) -def setup_passwords(service_name=SERVICE_NAME, task_name="master-0-node"): +def setup_passwords(service_name=SERVICE_NAME, task_name="master-0-node", https=None): + if https: + master_0_node_dns = sdk_networks.get_endpoint(PACKAGE_NAME, service_name, "master-http")[ + "dns" + ][0] + url = "--url https://{}".format(master_0_node_dns) + else: + url = "" + cmd = "\n".join( [ "set -x", "export JAVA_HOME=$(ls -d ${MESOS_SANDBOX}/jdk*/jre/)", "ELASTICSEARCH_PATH=$(ls -d ${MESOS_SANDBOX}/elasticsearch-*/)", - "${ELASTICSEARCH_PATH}/bin/elasticsearch-setup-passwords auto --batch --verbose", + "${{ELASTICSEARCH_PATH}}/bin/elasticsearch-setup-passwords auto --batch --verbose {}".format( + url + ), ] ) + full_cmd = "bash -c '{}'".format(cmd) _, stdout, _ = sdk_cmd.service_task_exec(service_name, task_name, full_cmd) @@ -328,8 +341,10 @@ def explore_graph( ) -def start_trial_license(service_name=SERVICE_NAME): - return _curl_query(service_name, "POST", "_xpack/license/start_trial?acknowledge=true") +def start_trial_license(service_name=SERVICE_NAME, https=None): + return _curl_query( + service_name, "POST", "_xpack/license/start_trial?acknowledge=true", https=https + ) def get_elasticsearch_indices_stats(index_name, service_name=SERVICE_NAME): diff --git a/frameworks/elastic/tests/test_tls.py b/frameworks/elastic/tests/test_tls.py index 9aeaf592d77..c9340b75c6d 100644 --- a/frameworks/elastic/tests/test_tls.py +++ b/frameworks/elastic/tests/test_tls.py @@ -35,27 +35,49 @@ def service_account(configure_security): @pytest.fixture(scope="module") def elastic_service(service_account): 
+ package_name = config.PACKAGE_NAME + service_name = config.SERVICE_NAME + expected_running_tasks = config.DEFAULT_TASK_COUNT + service_options = { "service": { - "name": config.SERVICE_NAME, + "name": service_name, "service_account": service_account["name"], "service_account_secret": service_account["secret"], "security": {"transport_encryption": {"enabled": True}}, }, - "elasticsearch": {"xpack_enabled": True}, + "elasticsearch": {"xpack_security_enabled": True}, } - sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME) + sdk_install.uninstall(package_name, service_name) + try: sdk_install.install( - config.PACKAGE_NAME, - service_name=config.SERVICE_NAME, - expected_running_tasks=config.DEFAULT_TASK_COUNT, + package_name, + service_name=service_name, + expected_running_tasks=expected_running_tasks, additional_options=service_options, timeout_seconds=30 * 60, ) - yield {**service_options, **{"package_name": config.PACKAGE_NAME}} + # Start trial license. + config.start_trial_license(service_name, https=True) + + # Set up passwords. Basic HTTP credentials will have to be used in HTTP requests to + # Elasticsearch from now on. + passwords = config.setup_passwords(service_name, https=True) + + # Set up healthcheck basic HTTP credentials. + sdk_service.update_configuration( + package_name, + service_name, + { + "elasticsearch": {"health_user_password": passwords["elastic"]}, + }, + expected_running_tasks, + ) + + yield {**service_options, **{"package_name": package_name}} finally: sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME) From 354109591e88045f53cc0dcee65447a435f3cd3f Mon Sep 17 00:00:00 2001 From: Murilo Pereira Date: Mon, 25 Mar 2019 23:21:44 +0100 Subject: [PATCH 03/10] Add missing import. --- frameworks/elastic/tests/test_tls.py | 1 + 1 file changed, 1 insertion(+) diff --git a/frameworks/elastic/tests/test_tls.py b/frameworks/elastic/tests/test_tls.py index de1f5528bce..655fc6dfb3c 100644 --- a/frameworks/elastic/tests/test_tls.py +++ b/frameworks/elastic/tests/test_tls.py @@ -6,6 +6,7 @@ import sdk_install import sdk_hosts import sdk_recovery +import sdk_service import sdk_utils from security import transport_encryption From bb89d4289c5ccadfb09d91225a449dda8d644ede Mon Sep 17 00:00:00 2001 From: Murilo Pereira Date: Tue, 26 Mar 2019 22:00:10 +0100 Subject: [PATCH 04/10] Black formatting, assign constants with long names to variables. This commit introduces no functional changes. It is an aesthetic refactoring. 
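Concretely, the refactoring applied repeatedly below hoists long dotted constants into short
module-level aliases that the rest of the module reuses; a condensed illustration, taken from
test_sanity.py:

    # Before: expressions such as config.PACKAGE_NAME and
    # sdk_utils.get_foldered_name(config.SERVICE_NAME) were repeated in every test.
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)

    # After: short module-level aliases, assigned once and reused everywhere.
    package_name = config.PACKAGE_NAME
    service_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    index_name = config.DEFAULT_INDEX_NAME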
--- frameworks/elastic/tests/config.py | 53 ++--- frameworks/elastic/tests/test_sanity.py | 303 +++++++++++------------- frameworks/elastic/tests/test_tls.py | 22 +- 3 files changed, 164 insertions(+), 214 deletions(-) diff --git a/frameworks/elastic/tests/config.py b/frameworks/elastic/tests/config.py index a5440bb0eb4..7f022b63740 100644 --- a/frameworks/elastic/tests/config.py +++ b/frameworks/elastic/tests/config.py @@ -111,7 +111,9 @@ def check_custom_elasticsearch_cluster_setting( setting_path: Optional[str] = None, expected_value: Optional[str] = None, ) -> bool: - settings = _curl_query(service_name, "GET", "_cluster/settings?include_defaults=true")["defaults"] + settings = _curl_query(service_name, "GET", "_cluster/settings?include_defaults=true")[ + "defaults" + ] if not settings: return False actual_value = get_in(setting_path, settings) @@ -125,8 +127,7 @@ def check_custom_elasticsearch_cluster_setting( wait_fixed=1000, stop_max_delay=DEFAULT_TIMEOUT * 1000, retry_on_result=lambda res: not res ) def wait_for_expected_nodes_to_exist( - service_name: str = SERVICE_NAME, - task_count: int = DEFAULT_TASK_COUNT, + service_name: str = SERVICE_NAME, task_count: int = DEFAULT_TASK_COUNT ) -> bool: result = _curl_query(service_name, "GET", "_cluster/health") if not result or "number_of_nodes" not in result: @@ -140,10 +141,7 @@ def wait_for_expected_nodes_to_exist( @retrying.retry( wait_fixed=1000, stop_max_delay=DEFAULT_TIMEOUT * 1000, retry_on_result=lambda res: not res ) -def check_kibana_plugin_installed( - plugin_name: str, - service_name: str = SERVICE_NAME, -) -> bool: +def check_kibana_plugin_installed(plugin_name: str, service_name: str = SERVICE_NAME) -> bool: task_sandbox = sdk_cmd.get_task_sandbox_path(service_name) # Environment variables aren't available on DC/OS 1.9 so we manually inject MESOS_SANDBOX (and # can't use ELASTIC_VERSION). 
@@ -163,7 +161,7 @@ def check_kibana_plugin_installed( wait_fixed=1000, stop_max_delay=DEFAULT_TIMEOUT * 1000, retry_on_result=lambda res: not res ) def check_elasticsearch_plugin_installed( - plugin_name: str, service_name: str = SERVICE_NAME, + plugin_name: str, service_name: str = SERVICE_NAME ) -> bool: result = _get_hosts_with_plugin(service_name, plugin_name) return result is not None and len(result) == DEFAULT_TASK_COUNT @@ -173,8 +171,7 @@ def check_elasticsearch_plugin_installed( wait_fixed=1000, stop_max_delay=DEFAULT_TIMEOUT * 1000, retry_on_result=lambda res: not res ) def check_elasticsearch_plugin_uninstalled( - plugin_name: str, - service_name: str = SERVICE_NAME, + plugin_name: str, service_name: str = SERVICE_NAME ) -> bool: result = _get_hosts_with_plugin(service_name, plugin_name) return result is not None and result == [] @@ -236,9 +233,14 @@ def verify_commercial_api_status( http_user: Optional[str] = None, http_password: Optional[str] = None, ) -> bool: - return bool(verify_graph_explore_endpoint( - is_expected_to_be_enabled, service_name, http_user=http_user, http_password=http_password - )) + return bool( + verify_graph_explore_endpoint( + is_expected_to_be_enabled, + service_name, + http_user=http_user, + http_password=http_password, + ) + ) # On Elastic 6.x, the "Graph Explore API" is available when the Elasticsearch cluster is configured @@ -330,9 +332,7 @@ def verify_xpack_license( wait_fixed=1000, stop_max_delay=5 * 1000, retry_on_result=lambda return_value: not return_value ) def setup_passwords( - service_name: str = SERVICE_NAME, - task_name: str = "master-0-node", - https: bool = False, + service_name: str = SERVICE_NAME, task_name: str = "master-0-node", https: bool = False ) -> Union[bool, Dict[str, str]]: if https: master_0_node_dns = sdk_networks.get_endpoint(PACKAGE_NAME, service_name, "master-http")[ @@ -390,10 +390,7 @@ def explore_graph( return result -def start_trial_license( - service_name: str = SERVICE_NAME, - https: bool = False, -) -> Dict[str, Any]: +def start_trial_license(service_name: str = SERVICE_NAME, https: bool = False) -> Dict[str, Any]: result = _curl_query( service_name, "POST", "_xpack/license/start_trial?acknowledge=true", https=https ) @@ -402,8 +399,7 @@ def start_trial_license( def get_elasticsearch_indices_stats( - index_name: str, - service_name: str = SERVICE_NAME, + index_name: str, service_name: str = SERVICE_NAME ) -> Dict[str, Any]: result = _curl_query(service_name, "GET", "{}/_stats".format(index_name)) assert isinstance(result, dict) @@ -563,9 +559,7 @@ def build_errmsg(msg: str) -> str: # TODO(mpereira): it is safe to remove this test after the 6.x release. def test_xpack_enabled_update( - service_name: str, - from_xpack_enabled: bool, - to_xpack_enabled: bool, + service_name: str, from_xpack_enabled: bool, to_xpack_enabled: bool ) -> None: sdk_upgrade.test_upgrade( PACKAGE_NAME, @@ -584,9 +578,7 @@ def test_xpack_enabled_update( # TODO(mpereira): change this to xpack_security_enabled to xpack_security_enabled after the 6.x # release. 
def test_update_from_xpack_enabled_to_xpack_security_enabled( - service_name: str, - xpack_enabled: bool, - xpack_security_enabled: bool, + service_name: str, xpack_enabled: bool, xpack_security_enabled: bool ) -> None: assert not ( xpack_enabled is True and xpack_security_enabled is True @@ -607,10 +599,7 @@ def test_update_from_xpack_enabled_to_xpack_security_enabled( def test_upgrade_from_xpack_enabled( - package_name: str, - service_name: str, - options: Dict[str, Any], - expected_task_count: int, + package_name: str, service_name: str, options: Dict[str, Any], expected_task_count: int ) -> None: # This test needs to run some code in between the Universe version installation and the upgrade # to the 'stub-universe' version, so it cannot use `sdk_upgrade.test_upgrade`. diff --git a/frameworks/elastic/tests/test_sanity.py b/frameworks/elastic/tests/test_sanity.py index d458eca9a30..888ac67831e 100644 --- a/frameworks/elastic/tests/test_sanity.py +++ b/frameworks/elastic/tests/test_sanity.py @@ -18,72 +18,73 @@ log = logging.getLogger(__name__) -foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME) +package_name = config.PACKAGE_NAME +service_name = sdk_utils.get_foldered_name(config.SERVICE_NAME) current_expected_task_count = config.DEFAULT_TASK_COUNT +kibana_package_name = config.KIBANA_PACKAGE_NAME +kibana_service_name = config.KIBANA_SERVICE_NAME +kibana_timeout = config.KIBANA_DEFAULT_TIMEOUT +index_name = config.DEFAULT_INDEX_NAME +index_type = config.DEFAULT_INDEX_TYPE +index = config.DEFAULT_SETTINGS_MAPPINGS @pytest.fixture(scope="module", autouse=True) def configure_package(configure_security: None) -> Iterator[None]: try: log.info("Ensure elasticsearch and kibana are uninstalled...") - sdk_install.uninstall(config.KIBANA_PACKAGE_NAME, config.KIBANA_PACKAGE_NAME) - sdk_install.uninstall(config.PACKAGE_NAME, foldered_name) + sdk_install.uninstall(kibana_package_name, kibana_package_name) + sdk_install.uninstall(package_name, service_name) sdk_upgrade.test_upgrade( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, current_expected_task_count, - additional_options={"service": {"name": foldered_name}}, + additional_options={"service": {"name": service_name}}, ) yield # let the test session execute finally: log.info("Clean up elasticsearch and kibana...") - sdk_install.uninstall(config.KIBANA_PACKAGE_NAME, config.KIBANA_PACKAGE_NAME) - sdk_install.uninstall(config.PACKAGE_NAME, foldered_name) + sdk_install.uninstall(kibana_package_name, kibana_package_name) + sdk_install.uninstall(package_name, service_name) @pytest.fixture(autouse=True) def pre_test_setup() -> None: - sdk_tasks.check_running(foldered_name, current_expected_task_count) + sdk_tasks.check_running(service_name, current_expected_task_count) config.wait_for_expected_nodes_to_exist( - service_name=foldered_name, task_count=current_expected_task_count + service_name=service_name, task_count=current_expected_task_count ) @pytest.fixture def default_populated_index() -> None: - config.delete_index(config.DEFAULT_INDEX_NAME, service_name=foldered_name) - config.create_index( - config.DEFAULT_INDEX_NAME, config.DEFAULT_SETTINGS_MAPPINGS, service_name=foldered_name - ) + config.delete_index(index_name, service_name=service_name) + config.create_index(index_name, index, service_name=service_name) config.create_document( - config.DEFAULT_INDEX_NAME, - config.DEFAULT_INDEX_TYPE, - 1, - {"name": "Loren", "role": "developer"}, - service_name=foldered_name, + index_name, index_type, 1, {"name": 
"Loren", "role": "developer"}, service_name=service_name ) @pytest.mark.recovery @pytest.mark.sanity def test_pod_replace_then_immediate_config_update() -> None: - sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, "pod replace data-0") + sdk_cmd.svc_cli(package_name, service_name, "pod replace data-0") plugins = "analysis-phonetic" sdk_service.update_configuration( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, {"service": {"update_strategy": "parallel"}, "elasticsearch": {"plugins": plugins}}, current_expected_task_count, ) # Ensure all nodes, especially data-0, get launched with the updated config. - config.check_elasticsearch_plugin_installed(plugins, service_name=foldered_name) - sdk_plan.wait_for_completed_deployment(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) + config.check_elasticsearch_plugin_installed(plugins, service_name=service_name) + sdk_plan.wait_for_completed_deployment(service_name) + sdk_plan.wait_for_completed_recovery(service_name) @pytest.mark.sanity @@ -91,30 +92,24 @@ def test_endpoints() -> None: # Check that we can reach the scheduler via admin router, and that returned endpoints are # sanitized. for endpoint in config.ENDPOINT_TYPES: - endpoints = sdk_networks.get_endpoint(config.PACKAGE_NAME, foldered_name, endpoint) + endpoints = sdk_networks.get_endpoint(package_name, service_name, endpoint) host = endpoint.split("-")[0] # 'coordinator-http' => 'coordinator' - assert endpoints["dns"][0].startswith( - sdk_hosts.autoip_host(foldered_name, host + "-0-node") - ) - assert endpoints["vip"].startswith(sdk_hosts.vip_host(foldered_name, host)) + assert endpoints["dns"][0].startswith(sdk_hosts.autoip_host(service_name, host + "-0-node")) + assert endpoints["vip"].startswith(sdk_hosts.vip_host(service_name, host)) - sdk_plan.wait_for_completed_deployment(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) + sdk_plan.wait_for_completed_deployment(service_name) + sdk_plan.wait_for_completed_recovery(service_name) @pytest.mark.sanity def test_indexing(default_populated_index: None) -> None: - indices_stats = config.get_elasticsearch_indices_stats( - config.DEFAULT_INDEX_NAME, service_name=foldered_name - ) + indices_stats = config.get_elasticsearch_indices_stats(index_name, service_name=service_name) assert indices_stats["_all"]["primaries"]["docs"]["count"] == 1 - doc = config.get_document( - config.DEFAULT_INDEX_NAME, config.DEFAULT_INDEX_TYPE, 1, service_name=foldered_name - ) + doc = config.get_document(index_name, index_type, 1, service_name=service_name) assert doc["_source"]["name"] == "Loren" - sdk_plan.wait_for_completed_deployment(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) + sdk_plan.wait_for_completed_deployment(service_name) + sdk_plan.wait_for_completed_recovery(service_name) @pytest.mark.sanity @@ -135,16 +130,16 @@ def expected_metrics_exist(emitted_metrics: List[str]) -> bool: return sdk_metrics.check_metrics_presence(metric_names, expected_metrics) sdk_metrics.wait_for_service_metrics( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, "data-0", "data-0-node", config.DEFAULT_TIMEOUT, expected_metrics_exist, ) - sdk_plan.wait_for_completed_deployment(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) + sdk_plan.wait_for_completed_deployment(service_name) + sdk_plan.wait_for_completed_recovery(service_name) @pytest.mark.sanity @@ -156,15 +151,17 @@ def test_custom_yaml_base64() -> None: # allocation: # 
node_initial_primaries_recoveries: 3 # script.allowed_contexts: ["search", "update"] - base64_elasticsearch_yml = "".join([ - "Y2x1c3RlcjoKICByb3V0aW5nOgogICAgYWxsb2NhdGlvbjoKICAgICAgbm9kZV9pbml0aWFsX3By", - "aW1hcmllc19yZWNvdmVyaWVzOiAzCnNjcmlwdC5hbGxvd2VkX2NvbnRleHRzOiBbInNlYXJjaCIs", - "ICJ1cGRhdGUiXQ==", - ]) + base64_elasticsearch_yml = "".join( + [ + "Y2x1c3RlcjoKICByb3V0aW5nOgogICAgYWxsb2NhdGlvbjoKICAgICAgbm9kZV9pbml0aWFsX3By", + "aW1hcmllc19yZWNvdmVyaWVzOiAzCnNjcmlwdC5hbGxvd2VkX2NvbnRleHRzOiBbInNlYXJjaCIs", + "ICJ1cGRhdGUiXQ==", + ] + ) sdk_service.update_configuration( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, {"elasticsearch": {"custom_elasticsearch_yml": base64_elasticsearch_yml}}, current_expected_task_count, ) @@ -175,9 +172,7 @@ def test_custom_yaml_base64() -> None: # Here we want to make sure that the end-result for the multiple YAML/Mustache compilation steps # results in a valid elasticsearch.yml file, with the correct setting value. config.check_custom_elasticsearch_cluster_setting( - foldered_name, - ["cluster", "routing", "allocation", "node_initial_primaries_recoveries"], - "3", + service_name, ["cluster", "routing", "allocation", "node_initial_primaries_recoveries"], "3" ) # 2. `script.allowed_contexts` has an "array of strings" value defined in the custom YAML. Here @@ -185,55 +180,55 @@ def test_custom_yaml_base64() -> None: # results in a valid elasticsearch.yml file, but with a trickier compilation case due to the # setting value being an array of strings. config.check_custom_elasticsearch_cluster_setting( - foldered_name, - ["script", "allowed_contexts"], - ["search", "update"], + service_name, ["script", "allowed_contexts"], ["search", "update"] ) @pytest.mark.sanity @pytest.mark.timeout(60 * 60) def test_security_toggle_with_kibana(default_populated_index: None) -> None: + http_user = config.DEFAULT_ELASTICSEARCH_USER + # Verify that commercial APIs are disabled by default in Elasticsearch. - config.verify_commercial_api_status(False, service_name=foldered_name) + config.verify_commercial_api_status(False, service_name=service_name) # Write some data with security disabled, enabled security, and afterwards verify that we can # still read what we wrote. document_security_disabled_id = 1 document_security_disabled_fields = {"name": "Elasticsearch", "role": "search engine"} config.create_document( - config.DEFAULT_INDEX_NAME, - config.DEFAULT_INDEX_TYPE, + index_name, + index_type, document_security_disabled_id, document_security_disabled_fields, - service_name=foldered_name, + service_name=service_name, ) # Verify that basic license is enabled by default. - config.verify_xpack_license("basic", service_name=foldered_name) + config.verify_xpack_license("basic", service_name=service_name) # Install Kibana. - elasticsearch_url = "http://" + sdk_hosts.vip_host(foldered_name, "coordinator", 9200) + elasticsearch_url = "http://" + sdk_hosts.vip_host(service_name, "coordinator", 9200) sdk_install.install( - config.KIBANA_PACKAGE_NAME, - config.KIBANA_PACKAGE_NAME, + kibana_package_name, + kibana_service_name, 0, {"kibana": {"elasticsearch_url": elasticsearch_url}}, - timeout_seconds=config.KIBANA_DEFAULT_TIMEOUT, + timeout_seconds=kibana_timeout, wait_for_deployment=False, insert_strict_options=False, ) # Verify that it works. - config.check_kibana_adminrouter_integration("service/{}/".format(config.KIBANA_PACKAGE_NAME)) + config.check_kibana_adminrouter_integration("service/{}/".format(kibana_package_name)) # Uninstall it. 
- sdk_install.uninstall(config.KIBANA_PACKAGE_NAME, config.KIBANA_PACKAGE_NAME) + sdk_install.uninstall(kibana_package_name, kibana_package_name) # Enable Elasticsearch security. sdk_service.update_configuration( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, { "elasticsearch": {"xpack_security_enabled": True}, "service": {"update_strategy": "parallel"}, @@ -242,27 +237,21 @@ def test_security_toggle_with_kibana(default_populated_index: None) -> None: ) # This should still be disabled. - config.verify_commercial_api_status(False, service_name=foldered_name) + config.verify_commercial_api_status(False, service_name=service_name) # Start trial license. - config.start_trial_license(service_name=foldered_name) + config.start_trial_license(service_name=service_name) # Set up passwords. Basic HTTP credentials will have to be used in HTTP requests to # Elasticsearch from now on. - passwords = config.setup_passwords(foldered_name) + passwords = config.setup_passwords(service_name) # Verify trial license is working. config.verify_xpack_license( - "trial", - service_name=foldered_name, - http_user=config.DEFAULT_ELASTICSEARCH_USER, - http_password=passwords["elastic"], + "trial", service_name=service_name, http_user=http_user, http_password=passwords["elastic"] ) config.verify_commercial_api_status( - True, - service_name=foldered_name, - http_user=config.DEFAULT_ELASTICSEARCH_USER, - http_password=passwords["elastic"], + True, service_name=service_name, http_user=http_user, http_password=passwords["elastic"] ) # Write some data with security enabled, disable security, and afterwards verify that we can @@ -270,19 +259,19 @@ def test_security_toggle_with_kibana(default_populated_index: None) -> None: document_security_enabled_id = 2 document_security_enabled_fields = {"name": "X-Pack", "role": "commercial plugin"} config.create_document( - config.DEFAULT_INDEX_NAME, - config.DEFAULT_INDEX_TYPE, + index_name, + index_type, document_security_enabled_id, document_security_enabled_fields, - service_name=foldered_name, - http_user=config.DEFAULT_ELASTICSEARCH_USER, + service_name=service_name, + http_user=http_user, http_password=passwords["elastic"], ) # Install Kibana with security enabled. sdk_install.install( - config.KIBANA_PACKAGE_NAME, - config.KIBANA_PACKAGE_NAME, + kibana_package_name, + kibana_package_name, 0, { "kibana": { @@ -292,24 +281,22 @@ def test_security_toggle_with_kibana(default_populated_index: None) -> None: "password": passwords["kibana"], } }, - timeout_seconds=config.KIBANA_DEFAULT_TIMEOUT, + timeout_seconds=kibana_timeout, wait_for_deployment=False, insert_strict_options=False, ) # Verify that it works. Notice that with security enabled, one has to access # /service/kibana/login instead of /service/kibana. - config.check_kibana_adminrouter_integration( - "service/{}/login".format(config.KIBANA_PACKAGE_NAME) - ) + config.check_kibana_adminrouter_integration("service/{}/login".format(kibana_package_name)) # Uninstall it. - sdk_install.uninstall(config.KIBANA_PACKAGE_NAME, config.KIBANA_PACKAGE_NAME) + sdk_install.uninstall(kibana_package_name, kibana_package_name) # Disable Elastic security. 
sdk_service.update_configuration( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, { "elasticsearch": {"xpack_security_enabled": False}, "service": {"update_strategy": "parallel"}, @@ -319,10 +306,7 @@ def test_security_toggle_with_kibana(default_populated_index: None) -> None: # Verify we can read what was written before toggling security, without basic HTTP credentials. document_security_disabled = config.get_document( - config.DEFAULT_INDEX_NAME, - config.DEFAULT_INDEX_TYPE, - document_security_disabled_id, - service_name=foldered_name, + index_name, index_type, document_security_disabled_id, service_name=service_name ) assert ( document_security_disabled["_source"]["name"] == document_security_disabled_fields["name"] @@ -330,17 +314,14 @@ def test_security_toggle_with_kibana(default_populated_index: None) -> None: # Verify we can read what was written when security was enabled, without basic HTTP credentials. document_security_enabled = config.get_document( - config.DEFAULT_INDEX_NAME, - config.DEFAULT_INDEX_TYPE, - document_security_enabled_id, - service_name=foldered_name, + index_name, index_type, document_security_enabled_id, service_name=service_name ) assert document_security_enabled["_source"]["name"] == document_security_enabled_fields["name"] # Set update_strategy back to serial. sdk_service.update_configuration( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, {"service": {"update_strategy": "serial"}}, current_expected_task_count, ) @@ -349,42 +330,36 @@ def test_security_toggle_with_kibana(default_populated_index: None) -> None: @pytest.mark.recovery @pytest.mark.sanity def test_losing_and_regaining_index_health(default_populated_index: None) -> None: - config.check_elasticsearch_index_health( - config.DEFAULT_INDEX_NAME, "green", service_name=foldered_name - ) + config.check_elasticsearch_index_health(index_name, "green", service_name=service_name) sdk_cmd.kill_task_with_pattern( "data__.*Elasticsearch", "nobody", - agent_host=sdk_tasks.get_service_tasks(foldered_name, "data-0-node")[0].host, - ) - config.check_elasticsearch_index_health( - config.DEFAULT_INDEX_NAME, "yellow", service_name=foldered_name - ) - config.check_elasticsearch_index_health( - config.DEFAULT_INDEX_NAME, "green", service_name=foldered_name + agent_host=sdk_tasks.get_service_tasks(service_name, "data-0-node")[0].host, ) + config.check_elasticsearch_index_health(index_name, "yellow", service_name=service_name) + config.check_elasticsearch_index_health(index_name, "green", service_name=service_name) - sdk_plan.wait_for_completed_deployment(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) + sdk_plan.wait_for_completed_deployment(service_name) + sdk_plan.wait_for_completed_recovery(service_name) @pytest.mark.recovery @pytest.mark.sanity def test_master_reelection() -> None: - initial_master = config.get_elasticsearch_master(service_name=foldered_name) + initial_master = config.get_elasticsearch_master(service_name=service_name) sdk_cmd.kill_task_with_pattern( "master__.*Elasticsearch", "nobody", - agent_host=sdk_tasks.get_service_tasks(foldered_name, initial_master)[0].host, + agent_host=sdk_tasks.get_service_tasks(service_name, initial_master)[0].host, ) - sdk_plan.wait_for_in_progress_recovery(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) - config.wait_for_expected_nodes_to_exist(service_name=foldered_name) - new_master = config.get_elasticsearch_master(service_name=foldered_name) + 
sdk_plan.wait_for_in_progress_recovery(service_name) + sdk_plan.wait_for_completed_recovery(service_name) + config.wait_for_expected_nodes_to_exist(service_name=service_name) + new_master = config.get_elasticsearch_master(service_name=service_name) assert new_master.startswith("master") and new_master != initial_master - sdk_plan.wait_for_completed_deployment(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) + sdk_plan.wait_for_completed_deployment(service_name) + sdk_plan.wait_for_completed_recovery(service_name) @pytest.mark.recovery @@ -393,25 +368,25 @@ def test_master_node_replace() -> None: # Ideally, the pod will get placed on a different agent. This test will verify that the # remaining two masters find the replaced master at its new IP address. This requires a # reasonably low TTL for Java DNS lookups. - sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, "pod replace master-0") - sdk_plan.wait_for_in_progress_recovery(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) + sdk_cmd.svc_cli(package_name, service_name, "pod replace master-0") + sdk_plan.wait_for_in_progress_recovery(service_name) + sdk_plan.wait_for_completed_recovery(service_name) @pytest.mark.recovery @pytest.mark.sanity def test_data_node_replace() -> None: - sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, "pod replace data-0") - sdk_plan.wait_for_in_progress_recovery(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) + sdk_cmd.svc_cli(package_name, service_name, "pod replace data-0") + sdk_plan.wait_for_in_progress_recovery(service_name) + sdk_plan.wait_for_completed_recovery(service_name) @pytest.mark.recovery @pytest.mark.sanity def test_coordinator_node_replace() -> None: - sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, "pod replace coordinator-0") - sdk_plan.wait_for_in_progress_recovery(foldered_name) - sdk_plan.wait_for_completed_recovery(foldered_name) + sdk_cmd.svc_cli(package_name, service_name, "pod replace coordinator-0") + sdk_plan.wait_for_in_progress_recovery(service_name) + sdk_plan.wait_for_completed_recovery(service_name) @pytest.mark.recovery @@ -421,34 +396,29 @@ def test_plugin_install_and_uninstall(default_populated_index: None) -> None: plugins = "analysis-icu" sdk_service.update_configuration( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, {"elasticsearch": {"plugins": plugins}}, current_expected_task_count, ) - config.check_elasticsearch_plugin_installed(plugins, service_name=foldered_name) + config.check_elasticsearch_plugin_installed(plugins, service_name=service_name) sdk_service.update_configuration( - config.PACKAGE_NAME, - foldered_name, - {"elasticsearch": {"plugins": ""}}, - current_expected_task_count, + package_name, service_name, {"elasticsearch": {"plugins": ""}}, current_expected_task_count ) - config.check_elasticsearch_plugin_uninstalled(plugins, service_name=foldered_name) + config.check_elasticsearch_plugin_uninstalled(plugins, service_name=service_name) @pytest.mark.recovery @pytest.mark.sanity def test_add_ingest_and_coordinator_nodes_does_not_restart_master_or_data_nodes() -> None: - initial_master_task_ids = sdk_tasks.get_task_ids(foldered_name, "master") - initial_data_task_ids = sdk_tasks.get_task_ids(foldered_name, "data") + initial_master_task_ids = sdk_tasks.get_task_ids(service_name, "master") + initial_data_task_ids = sdk_tasks.get_task_ids(service_name, "data") # Get service configuration. 
- _, svc_config, _ = sdk_cmd.svc_cli( - config.PACKAGE_NAME, foldered_name, "describe", parse_json=True - ) + _, svc_config, _ = sdk_cmd.svc_cli(package_name, service_name, "describe", parse_json=True) ingest_nodes_count = get_in(["ingest_nodes", "count"], svc_config) coordinator_nodes_count = get_in(["coordinator_nodes", "count"], svc_config) @@ -456,8 +426,8 @@ def test_add_ingest_and_coordinator_nodes_does_not_restart_master_or_data_nodes( global current_expected_task_count sdk_service.update_configuration( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, { "ingest_nodes": {"count": ingest_nodes_count + 1}, "coordinator_nodes": {"count": coordinator_nodes_count + 1}, @@ -471,24 +441,22 @@ def test_add_ingest_and_coordinator_nodes_does_not_restart_master_or_data_nodes( # Should be running 2 tasks more. current_expected_task_count += 2 - sdk_tasks.check_running(foldered_name, current_expected_task_count) + sdk_tasks.check_running(service_name, current_expected_task_count) # Master nodes should not restart. - sdk_tasks.check_tasks_not_updated(foldered_name, "master", initial_master_task_ids) + sdk_tasks.check_tasks_not_updated(service_name, "master", initial_master_task_ids) # Data nodes should not restart. - sdk_tasks.check_tasks_not_updated(foldered_name, "data", initial_data_task_ids) + sdk_tasks.check_tasks_not_updated(service_name, "data", initial_data_task_ids) @pytest.mark.recovery @pytest.mark.sanity def test_adding_data_node_only_restarts_masters() -> None: - initial_master_task_ids = sdk_tasks.get_task_ids(foldered_name, "master") - initial_data_task_ids = sdk_tasks.get_task_ids(foldered_name, "data") - initial_coordinator_task_ids = sdk_tasks.get_task_ids(foldered_name, "coordinator") + initial_master_task_ids = sdk_tasks.get_task_ids(service_name, "master") + initial_data_task_ids = sdk_tasks.get_task_ids(service_name, "data") + initial_coordinator_task_ids = sdk_tasks.get_task_ids(service_name, "coordinator") # Get service configuration. - _, svc_config, _ = sdk_cmd.svc_cli( - config.PACKAGE_NAME, foldered_name, "describe", parse_json=True - ) + _, svc_config, _ = sdk_cmd.svc_cli(package_name, service_name, "describe", parse_json=True) data_nodes_count = get_in(["data_nodes", "count"], svc_config) @@ -496,8 +464,8 @@ def test_adding_data_node_only_restarts_masters() -> None: # Increase the data nodes count by 1. sdk_service.update_configuration( - config.PACKAGE_NAME, - foldered_name, + package_name, + service_name, {"data_nodes": {"count": data_nodes_count + 1}}, current_expected_task_count, # As of 2018-12-14, sdk_upgrade's `wait_for_deployment` has different behavior than @@ -506,14 +474,11 @@ def test_adding_data_node_only_restarts_masters() -> None: wait_for_deployment=False, ) - sdk_plan.wait_for_kicked_off_deployment(foldered_name) - sdk_plan.wait_for_completed_deployment(foldered_name) + sdk_plan.wait_for_kicked_off_deployment(service_name) + sdk_plan.wait_for_completed_deployment(service_name) _, new_data_pod_info, _ = sdk_cmd.svc_cli( - config.PACKAGE_NAME, - foldered_name, - "pod info data-{}".format(data_nodes_count), - parse_json=True, + package_name, service_name, "pod info data-{}".format(data_nodes_count), parse_json=True ) # Get task ID for new data node task. @@ -521,12 +486,12 @@ def test_adding_data_node_only_restarts_masters() -> None: # Should be running 1 task more. 
current_expected_task_count += 1 - sdk_tasks.check_running(foldered_name, current_expected_task_count) + sdk_tasks.check_running(service_name, current_expected_task_count) # Master nodes should restart. - sdk_tasks.check_tasks_updated(foldered_name, "master", initial_master_task_ids) + sdk_tasks.check_tasks_updated(service_name, "master", initial_master_task_ids) # Data node tasks should be the initial ones plus the new one. sdk_tasks.check_tasks_not_updated( - foldered_name, "data", initial_data_task_ids + [new_data_task_id] + service_name, "data", initial_data_task_ids + [new_data_task_id] ) # Coordinator tasks should not restart. - sdk_tasks.check_tasks_not_updated(foldered_name, "coordinator", initial_coordinator_task_ids) + sdk_tasks.check_tasks_not_updated(service_name, "coordinator", initial_coordinator_task_ids) diff --git a/frameworks/elastic/tests/test_tls.py b/frameworks/elastic/tests/test_tls.py index 655fc6dfb3c..93b743dec0d 100644 --- a/frameworks/elastic/tests/test_tls.py +++ b/frameworks/elastic/tests/test_tls.py @@ -81,7 +81,7 @@ def elastic_service(service_account: Dict[str, Any]) -> Iterator[Dict[str, Any]] yield {**service_options, **{"package_name": package_name}} finally: - sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME) + sdk_install.uninstall(package_name, service_name) @pytest.fixture(scope="module") @@ -109,7 +109,7 @@ def kibana_application(elastic_service: Dict[str, Any]) -> Iterator[None]: yield finally: - sdk_install.uninstall(config.KIBANA_PACKAGE_NAME, config.KIBANA_SERVICE_NAME) + sdk_install.uninstall(package_name, service_name) @pytest.mark.tls @@ -151,19 +151,15 @@ def test_kibana_tls(kibana_application: Dict[str, Any]) -> None: @pytest.mark.tls @pytest.mark.sanity @pytest.mark.recovery -def test_tls_recovery( - elastic_service: Dict[str, Any], - service_account: Dict[str, Any], -) -> None: - rc, stdout, _ = sdk_cmd.svc_cli( - elastic_service["package_name"], elastic_service["service"]["name"], "pod list" - ) +def test_tls_recovery(elastic_service: Dict[str, Any], service_account: Dict[str, Any]) -> None: + service_name = elastic_service["service"]["name"] + package_name = elastic_service["package_name"] + + rc, stdout, _ = sdk_cmd.svc_cli(package_name, service_name, "pod list") + assert rc == 0, "Pod list failed" for pod in json.loads(stdout): sdk_recovery.check_permanent_recovery( - elastic_service["package_name"], - elastic_service["service"]["name"], - pod, - recovery_timeout_s=25 * 60, + package_name, service_name, pod, recovery_timeout_s=25 * 60 ) From 1f499cbcca69ff2c84e4a6cb217c8b305b233a7c Mon Sep 17 00:00:00 2001 From: Murilo Pereira Date: Tue, 26 Mar 2019 22:06:56 +0100 Subject: [PATCH 05/10] Setup Kibana credentials. This fixes the TLS test, so we can remove the "skip" annotation. 
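In short, the elastic_service fixture now yields the passwords generated by
config.setup_passwords, and the kibana_application fixture feeds the generated "kibana"
password (plus TLS settings) into the Kibana package options; condensed from the fixtures below:

    # elastic_service: generate credentials once security and TLS are enabled.
    passwords = config.setup_passwords(service_name, https=True)
    yield {**service_options, **{"package_name": package_name, "passwords": passwords}}

    # kibana_application: point Kibana at the coordinator over HTTPS with those credentials.
    service_options = {
        "kibana": {
            "elasticsearch_tls": True,
            "elasticsearch_url": elasticsearch_url,
            "elasticsearch_xpack_security_enabled": True,
            "password": elastic_service["passwords"]["kibana"],
        }
    }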
--- frameworks/elastic/tests/test_tls.py | 53 +++++++++++++++------------- 1 file changed, 28 insertions(+), 25 deletions(-) diff --git a/frameworks/elastic/tests/test_tls.py b/frameworks/elastic/tests/test_tls.py index 93b743dec0d..bf8c19db062 100644 --- a/frameworks/elastic/tests/test_tls.py +++ b/frameworks/elastic/tests/test_tls.py @@ -1,5 +1,6 @@ import json import pytest +from toolz import get_in from typing import Any, Dict, Iterator import sdk_cmd @@ -51,9 +52,9 @@ def elastic_service(service_account: Dict[str, Any]) -> Iterator[Dict[str, Any]] "elasticsearch": {"xpack_security_enabled": True}, } - sdk_install.uninstall(package_name, service_name) - try: + sdk_install.uninstall(package_name, service_name) + sdk_install.install( package_name, service_name=service_name, @@ -73,41 +74,46 @@ def elastic_service(service_account: Dict[str, Any]) -> Iterator[Dict[str, Any]] sdk_service.update_configuration( package_name, service_name, - { - "elasticsearch": {"health_user_password": passwords["elastic"]}, - }, + {"elasticsearch": {"health_user_password": passwords["elastic"]}}, expected_running_tasks, ) - yield {**service_options, **{"package_name": package_name}} + yield {**service_options, **{"package_name": package_name, "passwords": passwords}} finally: sdk_install.uninstall(package_name, service_name) @pytest.fixture(scope="module") -def kibana_application(elastic_service: Dict[str, Any]) -> Iterator[None]: +def kibana_application(elastic_service: Dict[str, Any]) -> Iterator[Dict[str, Any]]: + package_name = config.KIBANA_PACKAGE_NAME + service_name = config.KIBANA_SERVICE_NAME + + elasticsearch_url = "https://" + sdk_hosts.vip_host( + elastic_service["service"]["name"], "coordinator", 9200 + ) + + service_options = { + "kibana": { + "elasticsearch_tls": True, + "elasticsearch_url": elasticsearch_url, + "elasticsearch_xpack_security_enabled": True, + "password": elastic_service["passwords"]["kibana"], + } + } + try: - elasticsearch_url = "https://" + sdk_hosts.vip_host( - config.SERVICE_NAME, "coordinator", 9200 - ) + sdk_install.uninstall(package_name, service_name) - sdk_install.uninstall(config.KIBANA_PACKAGE_NAME, config.KIBANA_SERVICE_NAME) sdk_install.install( - config.KIBANA_PACKAGE_NAME, - service_name=config.KIBANA_SERVICE_NAME, + package_name, + service_name=service_name, expected_running_tasks=0, - additional_options={ - "kibana": { - "elasticsearch_xpack_security_enabled": True, - "elasticsearch_tls": True, - "elasticsearch_url": elasticsearch_url, - } - }, + additional_options=service_options, timeout_seconds=config.KIBANA_DEFAULT_TIMEOUT, wait_for_deployment=False, ) - yield + yield {**service_options, **{"package_name": package_name, "elastic": elastic_service}} finally: sdk_install.uninstall(package_name, service_name) @@ -139,12 +145,9 @@ def test_crud_over_tls(elastic_service: Dict[str, Any]) -> None: @pytest.mark.tls @pytest.mark.sanity -@pytest.mark.skip( - message="Kibana 6.3 with TLS enabled is not working due Admin Router request header. Details in https://jira.mesosphere.com/browse/DCOS-43386" -) def test_kibana_tls(kibana_application: Dict[str, Any]) -> None: config.check_kibana_adminrouter_integration( - "service/{}/login".format(config.KIBANA_SERVICE_NAME) + "service/{}/login".format(kibana_application["service"]["name"]) ) From 47aaa72319e75956ad71461bf9e392a1c179b917 Mon Sep 17 00:00:00 2001 From: Murilo Pereira Date: Tue, 26 Mar 2019 22:10:00 +0100 Subject: [PATCH 06/10] Fix test_crud_over_tls. 
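Two pieces, both visible below: _curl_query now falls back to DEFAULT_ELASTICSEARCH_USER when
only a password is supplied (and rejects a user given without a password), and
test_crud_over_tls threads https=True plus the generated "elastic" password through every CRUD
helper; roughly:

    http_password = elastic_service["passwords"]["elastic"]
    config.create_index(
        index_name, index, service_name=service_name, https=True, http_password=http_password
    )
    document = config.get_document(
        index_name, index_type, document_id, https=True, http_password=http_password
    )
    assert get_in(["_source", "name"], document) == document_fields["name"]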
--- frameworks/elastic/tests/config.py | 20 ++++++++++-------- frameworks/elastic/tests/test_tls.py | 31 +++++++++++++++++----------- 2 files changed, 30 insertions(+), 21 deletions(-) diff --git a/frameworks/elastic/tests/config.py b/frameworks/elastic/tests/config.py index 7f022b63740..da6c910ce75 100644 --- a/frameworks/elastic/tests/config.py +++ b/frameworks/elastic/tests/config.py @@ -513,16 +513,18 @@ def _curl_query( ) -> Optional[Union[str, Dict[str, Any]]]: protocol = "https" if https else "http" - if http_password and not http_user: - raise Exception( - "HTTP authentication won't work with just a password. Needs at least user, or both user AND password" - ) - - credentials = "" - if http_user: - credentials = "-u {}".format(http_user) if http_password: - credentials = "{}:{}".format(credentials, http_password) + if not http_user: + http_user = DEFAULT_ELASTICSEARCH_USER + log.info("Using default basic HTTP user: '%s'", http_user) + + credentials = "-u {}:{}".format(http_user, http_password) + else: + if http_user: + raise Exception( + "HTTP authentication won't work with just a user. Needs both user AND password" + ) + credentials = "" host = sdk_hosts.autoip_host(service_name, task, _master_zero_http_port(service_name)) diff --git a/frameworks/elastic/tests/test_tls.py b/frameworks/elastic/tests/test_tls.py index bf8c19db062..e063bd060dc 100644 --- a/frameworks/elastic/tests/test_tls.py +++ b/frameworks/elastic/tests/test_tls.py @@ -121,26 +121,33 @@ def kibana_application(elastic_service: Dict[str, Any]) -> Iterator[Dict[str, An @pytest.mark.tls @pytest.mark.sanity def test_crud_over_tls(elastic_service: Dict[str, Any]) -> None: + service_name = elastic_service["service"]["name"] + http_password = elastic_service["passwords"]["elastic"] + index_name = config.DEFAULT_INDEX_NAME + index_type = config.DEFAULT_INDEX_TYPE + index = config.DEFAULT_SETTINGS_MAPPINGS + document_fields = {"name": "Loren", "role": "developer"} + document_id = 1 + config.create_index( - config.DEFAULT_INDEX_NAME, - config.DEFAULT_SETTINGS_MAPPINGS, - service_name=config.SERVICE_NAME, - https=True, + index_name, index, service_name=service_name, https=True, http_password=http_password ) + config.create_document( - config.DEFAULT_INDEX_NAME, - config.DEFAULT_INDEX_TYPE, - 1, - {"name": "Loren", "role": "developer"}, - service_name=config.SERVICE_NAME, + index_name, + index_type, + document_id, + document_fields, + service_name=service_name, https=True, + http_password=http_password, ) + document = config.get_document( - config.DEFAULT_INDEX_NAME, config.DEFAULT_INDEX_TYPE, 1, https=True + index_name, index_type, document_id, https=True, http_password=http_password ) - assert document - assert document["_source"]["name"] == "Loren" + assert get_in(["_source", "name"], document) == document_fields["name"] @pytest.mark.tls From 4271515aa8df9b88e2b9858eca4962ea0107ca73 Mon Sep 17 00:00:00 2001 From: Murilo Pereira Date: Tue, 26 Mar 2019 22:14:06 +0100 Subject: [PATCH 07/10] Set Kibana's `server.rewriteBasePath: false` and fix health check. 
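The Marathon health check (and the adminrouter check in test_sanity.py) now probes a path that
depends on the X-Pack security setting; a tiny helper that mirrors the mustache conditional
below (the function name is illustrative, not part of the change):

    def kibana_health_check_path(xpack_security_enabled: bool) -> str:
        # Security enabled: probe the login page; otherwise probe the Kibana app
        # route, which replaces the previous "/" path.
        return "/login" if xpack_security_enabled else "/app/kibana"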
--- frameworks/elastic/tests/test_sanity.py | 2 +- frameworks/elastic/universe-kibana/marathon.json.mustache | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frameworks/elastic/tests/test_sanity.py b/frameworks/elastic/tests/test_sanity.py index 888ac67831e..b2d6e771c53 100644 --- a/frameworks/elastic/tests/test_sanity.py +++ b/frameworks/elastic/tests/test_sanity.py @@ -220,7 +220,7 @@ def test_security_toggle_with_kibana(default_populated_index: None) -> None: ) # Verify that it works. - config.check_kibana_adminrouter_integration("service/{}/".format(kibana_package_name)) + config.check_kibana_adminrouter_integration("service/{}/app/kibana".format(kibana_package_name)) # Uninstall it. sdk_install.uninstall(kibana_package_name, kibana_package_name) diff --git a/frameworks/elastic/universe-kibana/marathon.json.mustache b/frameworks/elastic/universe-kibana/marathon.json.mustache index d2e13909586..7d2e87fa207 100644 --- a/frameworks/elastic/universe-kibana/marathon.json.mustache +++ b/frameworks/elastic/universe-kibana/marathon.json.mustache @@ -4,7 +4,7 @@ "mem": {{kibana.mem}}, "instances": 1, "user": "{{service.user}}", - "cmd": "echo -e \"elasticsearch.url: $ELASTICSEARCH_URL\nelasticsearch.username: $KIBANA_USER\nelasticsearch.password: $KIBANA_PASSWORD\nserver.host: 0.0.0.0\nserver.port: $PORT_KIBANA\nserver.basePath: /service/$FRAMEWORK_NAME\" > $MESOS_SANDBOX/kibana-$ELASTIC_VERSION-linux-x86_64/config/kibana.yml && echo -e \"\nxpack.security.encryptionKey: $MESOS_FRAMEWORK_ID\nxpack.reporting.encryptionKey: $MESOS_FRAMEWORK_ID\n\" >> $MESOS_SANDBOX/kibana-$ELASTIC_VERSION-linux-x86_64/config/kibana.yml && if [ \"$KIBANA_ELASTICSEARCH_TLS\" = true ]; then echo -e \"\nelasticsearch.ssl.certificateAuthorities: $MESOS_SANDBOX/.ssl/ca-bundle.crt\n\" >> $MESOS_SANDBOX/kibana-$ELASTIC_VERSION-linux-x86_64/config/kibana.yml; fi && env && $MESOS_SANDBOX/kibana-$ELASTIC_VERSION-linux-x86_64/bin/kibana", + "cmd": "echo -e \"elasticsearch.url: $ELASTICSEARCH_URL\nelasticsearch.username: $KIBANA_USER\nelasticsearch.password: $KIBANA_PASSWORD\nserver.host: 0.0.0.0\nserver.port: $PORT_KIBANA\nserver.basePath: /service/$FRAMEWORK_NAME\nserver.rewriteBasePath: false\" > $MESOS_SANDBOX/kibana-$ELASTIC_VERSION-linux-x86_64/config/kibana.yml && echo -e \"\nxpack.security.encryptionKey: $MESOS_FRAMEWORK_ID\nxpack.reporting.encryptionKey: $MESOS_FRAMEWORK_ID\n\" >> $MESOS_SANDBOX/kibana-$ELASTIC_VERSION-linux-x86_64/config/kibana.yml && if [ \"$KIBANA_ELASTICSEARCH_TLS\" = true ]; then echo -e \"\nelasticsearch.ssl.certificateAuthorities: $MESOS_SANDBOX/.ssl/ca-bundle.crt\n\" >> $MESOS_SANDBOX/kibana-$ELASTIC_VERSION-linux-x86_64/config/kibana.yml; fi && env && $MESOS_SANDBOX/kibana-$ELASTIC_VERSION-linux-x86_64/bin/kibana", "labels": { "DCOS_SERVICE_NAME": "{{service.name}}", "DCOS_SERVICE_PORT_INDEX": "0", @@ -41,7 +41,7 @@ { "protocol": "MESOS_HTTP", {{^kibana.elasticsearch_xpack_security_enabled}} - "path": "/", + "path": "/app/kibana", {{/kibana.elasticsearch_xpack_security_enabled}} {{#kibana.elasticsearch_xpack_security_enabled}} "path": "/login", From 01093c3f4397335da24c571171b3e65c2b3ff73c Mon Sep 17 00:00:00 2001 From: Murilo Pereira Date: Wed, 27 Mar 2019 09:44:18 +0100 Subject: [PATCH 08/10] Make sure service name is there for tests to use it. 
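That is, the kibana_application fixture's options now include the service name, so tests can
read it from the fixture value instead of reaching back into config; condensed sketch (the
kibana options are elided here):

    service_options = {
        "service": {"name": service_name},  # added by this patch
        "kibana": {"elasticsearch_tls": True},  # remaining kibana options elided
    }

    # e.g. in test_kibana_tls:
    config.check_kibana_adminrouter_integration(
        "service/{}/login".format(kibana_application["service"]["name"])
    )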
--- frameworks/elastic/tests/test_tls.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/frameworks/elastic/tests/test_tls.py b/frameworks/elastic/tests/test_tls.py index e063bd060dc..421be578925 100644 --- a/frameworks/elastic/tests/test_tls.py +++ b/frameworks/elastic/tests/test_tls.py @@ -93,6 +93,9 @@ def kibana_application(elastic_service: Dict[str, Any]) -> Iterator[Dict[str, An ) service_options = { + "service": { + "name": service_name, + }, "kibana": { "elasticsearch_tls": True, "elasticsearch_url": elasticsearch_url, From 8d2941690b96829bfd6d8de99c12ba4e7b99e9d0 Mon Sep 17 00:00:00 2001 From: Murilo Pereira Date: Wed, 27 Mar 2019 21:10:02 +0100 Subject: [PATCH 09/10] Enable sdk_upgrade functions to take both 'from' and 'to' versions. --- frameworks/cassandra/tests/test_racks.py | 2 +- frameworks/cassandra/tests/test_sanity.py | 2 +- frameworks/elastic/tests/test_sanity.py | 2 +- frameworks/hdfs/tests/test_sanity.py | 2 +- .../helloworld/tests/test_decommission.py | 2 +- .../tests/test_mono_to_multi_migrate.py | 8 +- frameworks/helloworld/tests/test_sanity.py | 2 +- frameworks/helloworld/tests/test_sidecar.py | 4 +- testing/sdk_service.py | 4 +- testing/sdk_upgrade.py | 125 +++++++----------- 10 files changed, 62 insertions(+), 91 deletions(-) diff --git a/frameworks/cassandra/tests/test_racks.py b/frameworks/cassandra/tests/test_racks.py index cc3aa71e149..51d261bb6e8 100644 --- a/frameworks/cassandra/tests/test_racks.py +++ b/frameworks/cassandra/tests/test_racks.py @@ -54,5 +54,5 @@ def test_custom_rack_upgrade(): config.PACKAGE_NAME, foldered_service_name, config.DEFAULT_TASK_COUNT, - additional_options=service_options, + from_options=service_options, ) diff --git a/frameworks/cassandra/tests/test_sanity.py b/frameworks/cassandra/tests/test_sanity.py index f61caa41025..127ad9d8a66 100644 --- a/frameworks/cassandra/tests/test_sanity.py +++ b/frameworks/cassandra/tests/test_sanity.py @@ -32,7 +32,7 @@ def configure_package(configure_security): config.PACKAGE_NAME, config.get_foldered_service_name(), config.DEFAULT_TASK_COUNT, - additional_options={"service": {"name": config.get_foldered_service_name()}}, + from_options={"service": {"name": config.get_foldered_service_name()}}, ) yield # let the test session execute diff --git a/frameworks/elastic/tests/test_sanity.py b/frameworks/elastic/tests/test_sanity.py index b2d6e771c53..22dd9d8e48f 100644 --- a/frameworks/elastic/tests/test_sanity.py +++ b/frameworks/elastic/tests/test_sanity.py @@ -40,7 +40,7 @@ def configure_package(configure_security: None) -> Iterator[None]: package_name, service_name, current_expected_task_count, - additional_options={"service": {"name": service_name}}, + from_options={"service": {"name": service_name}}, ) yield # let the test session execute diff --git a/frameworks/hdfs/tests/test_sanity.py b/frameworks/hdfs/tests/test_sanity.py index 12dc15a25a7..5fe8f3161a3 100644 --- a/frameworks/hdfs/tests/test_sanity.py +++ b/frameworks/hdfs/tests/test_sanity.py @@ -40,7 +40,7 @@ def configure_package(configure_security): config.PACKAGE_NAME, foldered_name, config.DEFAULT_TASK_COUNT, - additional_options={"service": {"name": foldered_name}}, + from_options={"service": {"name": foldered_name}}, timeout_seconds=30 * 60, ) diff --git a/frameworks/helloworld/tests/test_decommission.py b/frameworks/helloworld/tests/test_decommission.py index a0cefdf326c..6c9713ed19a 100644 --- a/frameworks/helloworld/tests/test_decommission.py +++ b/frameworks/helloworld/tests/test_decommission.py @@ -21,7 +21,7 @@ def 
configure_package(configure_security): config.PACKAGE_NAME, foldered_name, config.DEFAULT_TASK_COUNT, - additional_options={ + from_options={ "service": {"name": foldered_name, "scenario": "CUSTOM_DECOMMISSION"} }, ) diff --git a/frameworks/helloworld/tests/test_mono_to_multi_migrate.py b/frameworks/helloworld/tests/test_mono_to_multi_migrate.py index 5eedf1ec352..3f32d7dddb9 100644 --- a/frameworks/helloworld/tests/test_mono_to_multi_migrate.py +++ b/frameworks/helloworld/tests/test_mono_to_multi_migrate.py @@ -38,8 +38,8 @@ def test_old_tasks_not_relaunched(): sdk_upgrade.update_or_upgrade_or_downgrade( config.PACKAGE_NAME, config.SERVICE_NAME, - to_package_version=None, - additional_options={"service": {"yaml": "", "yamls": "svc,foobar_service_name"}}, + to_version=None, + to_options={"service": {"yaml": "", "yamls": "svc,foobar_service_name"}}, expected_running_tasks=4, wait_for_deployment=False, ) @@ -63,8 +63,8 @@ def test_old_tasks_get_relaunched_with_new_config(): sdk_upgrade.update_or_upgrade_or_downgrade( config.PACKAGE_NAME, config.SERVICE_NAME, - to_package_version=None, - additional_options={ + to_version=None, + to_options={ "service": {"yaml": "", "yamls": "svc,foobar_service_name"}, "hello": {"cpus": 0.2}, }, diff --git a/frameworks/helloworld/tests/test_sanity.py b/frameworks/helloworld/tests/test_sanity.py index f14cf5b0b4b..bedcb612602 100644 --- a/frameworks/helloworld/tests/test_sanity.py +++ b/frameworks/helloworld/tests/test_sanity.py @@ -29,7 +29,7 @@ def configure_package(configure_security): config.PACKAGE_NAME, foldered_name, config.DEFAULT_TASK_COUNT, - additional_options=service_options, + from_options=service_options, ) yield {"package_name": config.PACKAGE_NAME, **service_options} diff --git a/frameworks/helloworld/tests/test_sidecar.py b/frameworks/helloworld/tests/test_sidecar.py index 5083f448abf..f63bdf8ede0 100644 --- a/frameworks/helloworld/tests/test_sidecar.py +++ b/frameworks/helloworld/tests/test_sidecar.py @@ -60,8 +60,8 @@ def assert_envvar_has_value(envvar: str, expected_value: str): sdk_upgrade.update_or_upgrade_or_downgrade( config.PACKAGE_NAME, config.SERVICE_NAME, - to_package_version=None, - additional_options={ + to_version=None, + to_options={ "service": {"name": config.SERVICE_NAME, "sleep": sleep_duration, "yaml": "sidecar"} }, expected_running_tasks=2, diff --git a/testing/sdk_service.py b/testing/sdk_service.py index 1ccc50c9275..ab1a360a753 100644 --- a/testing/sdk_service.py +++ b/testing/sdk_service.py @@ -23,8 +23,8 @@ def update_configuration( sdk_upgrade.update_or_upgrade_or_downgrade( package_name=package_name, service_name=service_name, - to_package_version=None, - additional_options=configuration, + to_version=None, + to_options=configuration, expected_running_tasks=expected_task_count, wait_for_deployment=wait_for_deployment, timeout_seconds=timeout_seconds, diff --git a/testing/sdk_upgrade.py b/testing/sdk_upgrade.py index d63c12a804a..0caccd4a6c0 100644 --- a/testing/sdk_upgrade.py +++ b/testing/sdk_upgrade.py @@ -31,32 +31,45 @@ def test_upgrade( package_name: str, service_name: str, expected_running_tasks: int, - additional_options: Dict[str, Any] = {}, - test_version_additional_options: Optional[Dict[str, Any]] = None, + from_version: str = None, + from_options: Dict[str, Any] = {}, + to_version: str = None, + to_options: Optional[Dict[str, Any]] = None, timeout_seconds: int = TIMEOUT_SECONDS, wait_for_deployment: bool = True, ) -> None: - # Allow providing different options dicts to the universe version vs the test 
version. - test_version_additional_options = test_version_additional_options or additional_options - sdk_install.uninstall(package_name, service_name) + log.info( + "Called with 'from' version '{}' and 'to' version '{}'".format(from_version, to_version) + ) + universe_version = None try: - # Move the Universe repo to the top of the repo list so that we can first install the - # release version. + # Move the Universe repo to the top of the repo list so that we can first install the latest + # released version. test_version, universe_version = sdk_repository.move_universe_repo( package_name, universe_repo_index=0 ) - log.info("Found test version: {}".format(test_version)) + log.info("Found 'test' version: {}".format(test_version)) + log.info("Found 'universe' version: {}".format(universe_version)) + + from_version = from_version or universe_version + to_version = to_version or test_version + + log.info( + "Will upgrade {} from version '{}' to '{}'".format( + package_name, from_version, to_version + ) + ) - log.info("Installing Universe version: {}={}".format(package_name, universe_version)) + log.info("Installing {} 'from' version: {}".format(package_name, from_version)) sdk_install.install( package_name, service_name, expected_running_tasks, - package_version=universe_version, - additional_options=additional_options, + package_version=from_version, + additional_options=from_options, timeout_seconds=timeout_seconds, wait_for_deployment=wait_for_deployment, ) @@ -64,55 +77,16 @@ def test_upgrade( if universe_version: # Return the Universe repo back to the bottom of the repo list so that we can upgrade to # the build version. - universe_version, test_version = sdk_repository.move_universe_repo(package_name) - - log.info("Upgrading {}: {} => {}".format(package_name, universe_version, test_version)) - update_or_upgrade_or_downgrade( - package_name, - service_name, - test_version, - test_version_additional_options, - expected_running_tasks, - wait_for_deployment, - timeout_seconds, - ) - + sdk_repository.move_universe_repo(package_name) -# In the soak cluster, we assume that the Universe version of the framework is already installed. -# Also, we assume that the Universe is the default repo (at --index=0) and the stub repos are -# already in place, so we don't need to add or remove any repos. -# -# (1) Upgrades to test version of framework. -# (2) Downgrades to Universe version. 
-def soak_upgrade_downgrade(
-    package_name: str,
-    service_name: str,
-    expected_running_tasks: int,
-    additional_options: Dict[str, Any] = {},
-    timeout_seconds: int = TIMEOUT_SECONDS,
-    wait_for_deployment: bool = True,
-) -> None:
-    sdk_cmd.run_cli("package install --cli {} --yes".format(package_name))
-    version = "stub-universe"
-    log.info("Upgrading to test version: {} {}".format(package_name, version))
-    update_or_upgrade_or_downgrade(
-        package_name,
-        service_name,
-        version,
-        additional_options,
-        expected_running_tasks,
-        wait_for_deployment,
-        timeout_seconds,
+    log.info(
+        "Upgrading {} from version '{}' to '{}'".format(package_name, from_version, to_version)
     )
-
-    # Default Universe is at --index=0
-    version = sdk_repository._get_pkg_version(package_name)
-    log.info("Downgrading to Universe version: {} {}".format(package_name, version))
     update_or_upgrade_or_downgrade(
         package_name,
         service_name,
-        version,
-        additional_options,
+        to_version,
+        to_options,
         expected_running_tasks,
         wait_for_deployment,
         timeout_seconds,
     )
@@ -132,8 +106,10 @@ def get_config(package_name: str, service_name: str) -> Optional[Dict[str, Any]]
     if rc != 0:
         log.error(
-            "Could not get debug config target. return-code: '%s'\n"
-            "stdout: '%s'\nstderr: '%s'", rc, stdout, stderr
+            "Could not get debug config target. return-code: '%s'\n" "stdout: '%s'\nstderr: '%s'",
+            rc,
+            stdout,
+            stderr,
         )
         return None
     else:
@@ -145,49 +121,44 @@ def get_config(package_name: str, service_name: str) -> Optional[Dict[str, Any]]
             assert isinstance(result, dict)
             return result
         except Exception as e:
-            log.error("Could parse debug config target as JSON\n"
-                      "error: %s\n json to parse: %s", str(e), stdout)
+            log.error(
+                "Could not parse debug config target as JSON\n" "error: %s\n json to parse: %s",
+                str(e),
+                stdout,
+            )
             return None


 def update_or_upgrade_or_downgrade(
     package_name: str,
     service_name: str,
-    to_package_version: Optional[str],
-    additional_options: Dict[str, Any],
+    to_version: Optional[str],
+    to_options: Dict[str, Any],
     expected_running_tasks: int,
     wait_for_deployment: bool = True,
     timeout_seconds: int = TIMEOUT_SECONDS,
 ) -> bool:
     initial_config = get_config(package_name, service_name)
     task_ids = sdk_tasks.get_task_ids(service_name, "")

-    if (to_package_version and not is_cli_supports_service_version_upgrade()) or (
-        additional_options and not is_cli_supports_service_options_update()
+    if (to_version and not is_cli_supports_service_version_upgrade()) or (
+        to_options and not is_cli_supports_service_options_update()
     ):
-        log.info(
-            "Using marathon flow to upgrade %s to version [%s]", service_name, to_package_version
-        )
+        log.info("Using marathon flow to upgrade %s to version [%s]", service_name, to_version)
         sdk_marathon.destroy_app(service_name)
         sdk_install.install(
             package_name,
             service_name,
             expected_running_tasks,
-            additional_options=additional_options,
-            package_version=to_package_version,
+            additional_options=to_options,
+            package_version=to_version,
             timeout_seconds=timeout_seconds,
             wait_for_deployment=wait_for_deployment,
         )
     else:
-        _update_service_with_cli(package_name, service_name, to_package_version, additional_options)
+        _update_service_with_cli(package_name, service_name, to_version, to_options)

     if wait_for_deployment:
-        _wait_for_deployment(
-            package_name,
-            service_name,
-            initial_config,
-            task_ids,
-            timeout_seconds,
-        )
+        _wait_for_deployment(package_name, service_name, initial_config, task_ids, timeout_seconds)

     return not wait_for_deployment


@@ -196,7 +167,7 @@ def _update_service_with_cli(
     package_name: str,
     service_name: str,
     to_package_version: Optional[str] = None,
-    additional_options: Optional[Dict[str, Any]] = None
+    additional_options: Optional[Dict[str, Any]] = None,
 ) -> None:
     update_cmd = ["update", "start"]

From c5268deaa34257e5f5139755014eb1710d8850fe Mon Sep 17 00:00:00 2001
From: Murilo Pereira
Date: Wed, 27 Mar 2019 21:34:30 +0100
Subject: [PATCH 10/10] Only run X-Pack related upgrade tests for ES5->ES6 upgrades.

---
 frameworks/elastic/tests/config.py       | 49 +++++++++----------
 frameworks/elastic/tests/test_upgrade.py | 61 ++++++++++++++++--------
 2 files changed, 64 insertions(+), 46 deletions(-)

diff --git a/frameworks/elastic/tests/config.py b/frameworks/elastic/tests/config.py
index da6c910ce75..d19e941dbb6 100644
--- a/frameworks/elastic/tests/config.py
+++ b/frameworks/elastic/tests/config.py
@@ -9,7 +9,6 @@
 import sdk_hosts
 import sdk_install
 import sdk_networks
-import sdk_repository
 import sdk_service
 import sdk_upgrade
 import sdk_utils
@@ -559,16 +558,21 @@ def build_errmsg(msg: str) -> str:
     return None
 
 
-# TODO(mpereira): it is safe to remove this test after the 6.x release.
 def test_xpack_enabled_update(
-    service_name: str, from_xpack_enabled: bool, to_xpack_enabled: bool
+    service_name: str,
+    from_xpack_enabled: bool,
+    to_xpack_enabled: bool,
+    from_version: str,
+    to_version: str = "stub-universe",
 ) -> None:
     sdk_upgrade.test_upgrade(
         PACKAGE_NAME,
         service_name,
         DEFAULT_TASK_COUNT,
-        additional_options={"elasticsearch": {"xpack_enabled": from_xpack_enabled}},
-        test_version_additional_options={
+        from_version=from_version,
+        from_options={"elasticsearch": {"xpack_enabled": from_xpack_enabled}},
+        to_version=to_version,
+        to_options={
             "service": {"update_strategy": "parallel"},
             "elasticsearch": {"xpack_enabled": to_xpack_enabled},
         },
@@ -577,23 +581,17 @@ def test_xpack_enabled_update(
 
     wait_for_expected_nodes_to_exist(service_name=service_name, task_count=DEFAULT_TASK_COUNT)
 
 
-# TODO(mpereira): change this to xpack_security_enabled to xpack_security_enabled after the 6.x
-# release.
-def test_update_from_xpack_enabled_to_xpack_security_enabled(
-    service_name: str, xpack_enabled: bool, xpack_security_enabled: bool
+def test_xpack_security_enabled_update(
+    service_name: str, from_xpack_security_enabled: bool, to_xpack_security_enabled: bool
 ) -> None:
-    assert not (
-        xpack_enabled is True and xpack_security_enabled is True
-    ), "This function does not handle the 'xpack_enabled: True' to 'xpack_security_enabled: True' upgrade scenario"
-
     sdk_upgrade.test_upgrade(
         PACKAGE_NAME,
         service_name,
         DEFAULT_TASK_COUNT,
-        additional_options={"elasticsearch": {"xpack_enabled": xpack_enabled}},
-        test_version_additional_options={
+        from_options={"elasticsearch": {"xpack_security_enabled": from_xpack_security_enabled}},
+        to_options={
             "service": {"update_strategy": "parallel"},
-            "elasticsearch": {"xpack_security_enabled": xpack_security_enabled},
+            "elasticsearch": {"xpack_security_enabled": to_xpack_security_enabled},
         },
     )
@@ -601,7 +599,12 @@
 def test_upgrade_from_xpack_enabled(
-    package_name: str, service_name: str, options: Dict[str, Any], expected_task_count: int
+    package_name: str,
+    service_name: str,
+    options: Dict[str, Any],
+    expected_task_count: int,
+    from_version: str,
+    to_version: str = "stub-universe",
 ) -> None:
     # This test needs to run some code in between the Universe version installation and the upgrade
     # to the 'stub-universe' version, so it cannot use `sdk_upgrade.test_upgrade`.
@@ -610,16 +613,12 @@ def test_upgrade_from_xpack_enabled(
     sdk_install.uninstall(package_name, service_name)
 
-    # Move Universe repo to the top of the repo list so that we can first install the Universe
-    # version.
-    _, universe_version = sdk_repository.move_universe_repo(package_name, universe_repo_index=0)
-
     sdk_install.install(
         package_name,
         service_name,
         expected_running_tasks=expected_task_count,
         additional_options={"elasticsearch": {"xpack_enabled": True}},
-        package_version=universe_version,
+        package_version=from_version,
     )
 
     document_es_5_id = 1
@@ -647,16 +646,12 @@ def test_upgrade_from_xpack_enabled(
         http_password=http_password,
     )
 
-    # Move Universe repo back to the bottom of the repo list so that we can upgrade to the version
-    # under test.
-    _, test_version = sdk_repository.move_universe_repo(package_name)
-
     # First we upgrade to "X-Pack security enabled" set to false on ES6, so that we can use the
     # X-Pack migration assistance and upgrade APIs.
     sdk_upgrade.update_or_upgrade_or_downgrade(
         package_name,
         service_name,
-        test_version,
+        to_version,
         {
             "service": {"update_strategy": "parallel"},
             "elasticsearch": {"xpack_security_enabled": False},
diff --git a/frameworks/elastic/tests/test_upgrade.py b/frameworks/elastic/tests/test_upgrade.py
index 64e05f7d924..58bef5b544d 100644
--- a/frameworks/elastic/tests/test_upgrade.py
+++ b/frameworks/elastic/tests/test_upgrade.py
@@ -32,50 +32,73 @@ def uninstall_packages(configure_security: None) -> Iterator[None]:
     sdk_install.uninstall(config.PACKAGE_NAME, foldered_name)
 
 
-# TODO(mpereira): it is safe to remove this test after the 6.x release.
 @pytest.mark.sanity
 @pytest.mark.timeout(30 * 60)
-def test_xpack_update_matrix() -> None:
-    # Updating from X-Pack 'enabled' to X-Pack security 'enabled' (the default) is more involved
-    # than the other cases, so we use `test_upgrade_from_xpack_enabled`.
+def test_xpack_enabled_update_matrix() -> None:
+    from_version = "2.4.0-5.6.9"
+    to_version = "2.5.0-6.3.2"
+
+    # Updating from X-Pack 'enabled' to X-Pack Security 'enabled' is more involved than the other
+    # cases, so we use `test_upgrade_from_xpack_enabled`.
     log.info("Updating X-Pack from 'enabled' to 'enabled'")
     config.test_upgrade_from_xpack_enabled(
         config.PACKAGE_NAME,
         foldered_name,
         {"elasticsearch": {"xpack_enabled": True}},
         expected_task_count,
+        from_version=from_version,
+        to_version=to_version,
     )
 
     log.info("Updating X-Pack from 'enabled' to 'disabled'")
-    config.test_xpack_enabled_update(foldered_name, True, False)
+    config.test_xpack_enabled_update(foldered_name, True, False, from_version, to_version)
 
     log.info("Updating X-Pack from 'disabled' to 'enabled'")
-    config.test_xpack_enabled_update(foldered_name, False, True)
+    config.test_xpack_enabled_update(foldered_name, False, True, from_version, to_version)
 
     log.info("Updating X-Pack from 'disabled' to 'disabled'")
-    config.test_xpack_enabled_update(foldered_name, False, False)
+    config.test_xpack_enabled_update(foldered_name, False, False, from_version, to_version)
 
 
-# TODO(mpereira): change this to xpack_security_enabled to xpack_security_enabled after the 6.x
-# release.
 @pytest.mark.sanity
 @pytest.mark.timeout(30 * 60)
-def test_xpack_security_enabled_update_matrix() -> None:
-    # Updating from X-Pack 'enabled' to X-Pack security 'enabled' is more involved than the other
-    # cases, so we use `test_upgrade_from_xpack_enabled`.
-    log.info("Updating from X-Pack 'enabled' to X-Pack security 'enabled'")
+def test_xpack_enabled_to_xpack_security_enabled_update_matrix() -> None:
+    from_version = "2.4.0-5.6.9"
+    to_version = "2.5.0-6.3.2"
+
+    # Updating from X-Pack 'enabled' to X-Pack Security 'enabled' (the default) is more involved
+    # than the other cases, so we use `test_upgrade_from_xpack_enabled`.
+    log.info("Updating X-Pack from 'enabled' to X-Pack Security 'enabled'")
     config.test_upgrade_from_xpack_enabled(
         config.PACKAGE_NAME,
         foldered_name,
         {"elasticsearch": {"xpack_security_enabled": True}},
         expected_task_count,
+        from_version=from_version,
+        to_version=to_version,
     )
 
-    log.info("Updating from X-Pack 'enabled' to X-Pack security 'disabled'")
-    config.test_update_from_xpack_enabled_to_xpack_security_enabled(foldered_name, True, False)
+    log.info("Updating from X-Pack 'enabled' to X-Pack Security 'disabled'")
+    config.test_xpack_enabled_update(foldered_name, True, False, from_version, to_version)
+
+    log.info("Updating from X-Pack 'disabled' to X-Pack Security 'enabled'")
+    config.test_xpack_enabled_update(foldered_name, False, True, from_version, to_version)
+
+    log.info("Updating from X-Pack 'disabled' to X-Pack Security 'disabled'")
+    config.test_xpack_enabled_update(foldered_name, False, False, from_version, to_version)
+
+
+@pytest.mark.sanity
+@pytest.mark.timeout(30 * 60)
+def test_xpack_security_enabled_update_matrix() -> None:
+    log.info("Updating X-Pack Security from 'enabled' to 'enabled'")
+    config.test_xpack_security_enabled_update(foldered_name, True, True)
+
+    log.info("Updating X-Pack Security from 'enabled' to 'disabled'")
+    config.test_xpack_security_enabled_update(foldered_name, True, False)
 
-    log.info("Updating from X-Pack 'disabled' to X-Pack security 'enabled'")
-    config.test_update_from_xpack_enabled_to_xpack_security_enabled(foldered_name, False, True)
+    log.info("Updating X-Pack Security from 'disabled' to 'enabled'")
+    config.test_xpack_security_enabled_update(foldered_name, False, True)
 
-    log.info("Updating from X-Pack 'disabled' to X-Pack security 'disabled'")
-    config.test_update_from_xpack_enabled_to_xpack_security_enabled(foldered_name, False, False)
+    log.info("Updating X-Pack Security from 'disabled' to 'disabled'")
+    config.test_xpack_security_enabled_update(foldered_name, False, False)
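
Usage sketch (not part of the patches above): a framework test could drive the new 'from'/'to' API in testing/sdk_upgrade.py roughly as follows. The package name, service name, task count, and option dicts below are illustrative placeholders rather than values taken from these commits; only the keyword argument names and the "stub-universe" default come from the new test_upgrade signature.

    import sdk_upgrade

    def test_explicit_from_to_upgrade() -> None:
        # Install the older released ("from") version first, then upgrade to the version
        # under test ("to"). Leaving from_version/to_version as None falls back to the
        # latest Universe release and the stub-universe build, respectively.
        sdk_upgrade.test_upgrade(
            "hello-world",                  # hypothetical package name
            "/folder/hello-world",          # hypothetical service name
            3,                              # expected number of running tasks
            from_version=None,              # default: latest released Universe version
            from_options={"service": {"name": "/folder/hello-world"}},
            to_version="stub-universe",     # default: the build under test
            to_options={"service": {"update_strategy": "parallel"}},
        )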