From 1e59c2d93b557d2f2da1bb9c8fd58b076ba0b451 Mon Sep 17 00:00:00 2001 From: Suraj Goel Date: Tue, 5 May 2020 10:38:19 +0530 Subject: [PATCH 1/3] Initial --- qds_sdk/engine.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/qds_sdk/engine.py b/qds_sdk/engine.py index 534c940a..f3684b87 100644 --- a/qds_sdk/engine.py +++ b/qds_sdk/engine.py @@ -28,6 +28,8 @@ def set_engine_config(self, spark_version=None, custom_spark_config=None, hive_version=None, + is_hs2=None, + hs2_thrift_port=None, dbtap_id=None, fernet_key=None, overrides=None, @@ -60,6 +62,10 @@ def set_engine_config(self, hive_version: Version of hive to be used in cluster + is_hs2: Enable HS2 on master + + hs2_thrift_port: Thrift port HS2 on master will run on + dbtap_id: ID of the data store inside QDS fernet_key: Encryption key for sensitive information inside airflow database. @@ -82,7 +88,7 @@ def set_engine_config(self, self.set_hadoop_settings(custom_hadoop_config, use_qubole_placement_policy, is_ha, fairscheduler_config_xml, default_pool, enable_rubix) - self.set_hive_settings(hive_version) + self.set_hive_settings(hive_version, is_hs2, hs2_thrift_port) self.set_presto_settings(presto_version, custom_presto_config) self.set_spark_settings(spark_version, custom_spark_config) self.set_airflow_settings(dbtap_id, fernet_key, overrides, airflow_version, airflow_python_version) @@ -110,8 +116,12 @@ def set_hadoop_settings(self, self.hadoop_settings['enable_rubix'] = enable_rubix def set_hive_settings(self, - hive_version=None): + hive_version=None, + is_hs2=None, + hs2_thrift_port=None): self.hive_settings['hive_version'] = hive_version + self.hive_settings['is_hs2'] = is_hs2 + self.hive_settings['hs2_thrift_port'] = hs2_thrift_port def set_presto_settings(self, presto_version=None, @@ -158,6 +168,8 @@ def set_engine_config_settings(self, arguments): spark_version=arguments.spark_version, custom_spark_config=arguments.custom_spark_config, 
hive_version=arguments.hive_version, + is_hs2=arguments.is_hs2, + hs2_thrift_port=arguments.hs2_thrift_port, dbtap_id=arguments.dbtap_id, fernet_key=arguments.fernet_key, overrides=arguments.overrides, @@ -234,6 +246,14 @@ def engine_parser(argparser): dest="hive_version", default=None, help="Version of hive for the cluster",) + hive_settings_group.add_argument("--is_hs2", + dest="is_hs2", + default=None, + help="Enable hs2 on master", ) + hive_settings_group.add_argument("--hs2_thrift_port", + dest="hs2_thrift_port", + default=None, + help="thrift port hs2 master will run on", ) spark_settings_group = argparser.add_argument_group("spark settings") spark_settings_group.add_argument("--spark-version", From b5b3df0577f3fa7ac8082b770c46fec3796c0ada Mon Sep 17 00:00:00 2001 From: Suraj Goel Date: Tue, 5 May 2020 12:38:04 +0530 Subject: [PATCH 2/3] changing help argument name for hive settings --- qds_sdk/engine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qds_sdk/engine.py b/qds_sdk/engine.py index f3684b87..61c863d9 100644 --- a/qds_sdk/engine.py +++ b/qds_sdk/engine.py @@ -241,7 +241,7 @@ def engine_parser(argparser): dest="presto_custom_config_file", help="location of file containg custom" + " presto configuration overrides") - hive_settings_group = argparser.add_argument_group("hive version settings") + hive_settings_group = argparser.add_argument_group("hive settings") hive_settings_group.add_argument("--hive_version", dest="hive_version", default=None, From 3bc427a2ad92ecc32947b4be44445ee1ee1a948b Mon Sep 17 00:00:00 2001 From: Suraj Goel Date: Wed, 6 May 2020 14:10:59 +0530 Subject: [PATCH 3/3] adding test case --- tests/test_clusterv22.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/test_clusterv22.py b/tests/test_clusterv22.py index ab4c03ec..357ec10b 100644 --- a/tests/test_clusterv22.py +++ b/tests/test_clusterv22.py @@ -211,3 +211,20 @@ def test_image_version_v22(self): 'slave_instance_type':
'c1.xlarge', 'cluster_image_version': '1.latest', 'composition': {'min_nodes': {'nodes': [{'percentage': 100, 'type': 'ondemand'}]}, 'master': {'nodes': [{'percentage': 100, 'type': 'ondemand'}]}, 'autoscaling_nodes': {'nodes': [{'percentage': 50, 'type': 'ondemand'}, {'timeout_for_request': 1, 'percentage': 50, 'type': 'spot', 'fallback': 'ondemand', 'maximum_bid_price_percentage': 100, 'allocation_strategy': None}]}}, 'label': ['test_label']}}) + + def test_hive_settings(self): + sys.argv = ['qds.py', '--version', 'v2.2', 'cluster', 'create', '--label', 'test_label', + '--is_hs2', 'true', '--hs2_thrift_port', '10001'] + print_command() + Connection._api_call = Mock(return_value={}) + qds.main() + Connection._api_call.assert_called_with('POST', 'clusters', + {'engine_config': {'hive_settings': {'is_hs2': 'true', 'hs2_thrift_port': '10001'}}, + 'cluster_info': {'label': ['test_label'], + 'composition': {'master': {'nodes': [{'percentage': 100, 'type': 'ondemand'}]}, + 'min_nodes': {'nodes': [{'percentage': 100, 'type': 'ondemand'}]}, + 'autoscaling_nodes': {'nodes': [{'percentage': 50, 'type': 'ondemand'}, + {'percentage': 50, 'type': 'spot', 'maximum_bid_price_percentage': 100, 'timeout_for_request': 1, 'allocation_strategy': None, 'fallback': 'ondemand'}]}}}}) + +if __name__ == '__main__': + unittest.main()