diff --git a/redash/settings/__init__.py b/redash/settings/__init__.py
index 1d5b3c431b..53e96aacd0 100644
--- a/redash/settings/__init__.py
+++ b/redash/settings/__init__.py
@@ -1,4 +1,5 @@
 import os
+import importlib
 from funcy import distinct, remove
 from flask_talisman import talisman
 
@@ -288,7 +289,8 @@ def email_server_is_configured():
 disabled_query_runners = array_from_string(os.environ.get("REDASH_DISABLED_QUERY_RUNNERS", ""))
 QUERY_RUNNERS = remove(set(disabled_query_runners), distinct(enabled_query_runners + additional_query_runners))
 
-ADHOC_QUERY_TIME_LIMIT = int_or_none(os.environ.get('REDASH_ADHOC_QUERY_TIME_LIMIT', None))
+
+dynamic_settings = importlib.import_module(os.environ.get('REDASH_DYNAMIC_SETTINGS_MODULE', 'redash.settings.dynamic_settings'))
 
 # Destinations
 default_destinations = [
diff --git a/redash/settings/dynamic_settings.py b/redash/settings/dynamic_settings.py
new file mode 100644
index 0000000000..fcd9494447
--- /dev/null
+++ b/redash/settings/dynamic_settings.py
@@ -0,0 +1,10 @@
+import os
+from .helpers import int_or_none
+
+
+# Replace this function with your own implementation if you want to override the time limit for certain queries or users.
+def query_time_limit(is_scheduled, user_id, org_id):
+    scheduled_time_limit = int_or_none(os.environ.get('REDASH_SCHEDULED_QUERY_TIME_LIMIT', None))
+    adhoc_time_limit = int_or_none(os.environ.get('REDASH_ADHOC_QUERY_TIME_LIMIT', None))
+
+    return scheduled_time_limit if is_scheduled else adhoc_time_limit
diff --git a/redash/tasks/queries.py b/redash/tasks/queries.py
index 1b0a6f6458..ab16209138 100644
--- a/redash/tasks/queries.py
+++ b/redash/tasks/queries.py
@@ -120,15 +120,12 @@ def enqueue_query(query, data_source, user_id, is_api_key=False, scheduled_query
             if not job:
                 pipe.multi()
 
-                time_limit = None
-
                 if scheduled_query:
                     queue_name = data_source.scheduled_queue_name
                     scheduled_query_id = scheduled_query.id
                 else:
                     queue_name = data_source.queue_name
                     scheduled_query_id = None
-                    time_limit = settings.ADHOC_QUERY_TIME_LIMIT
 
                 args = (query, data_source.id, metadata, user_id, scheduled_query_id, is_api_key)
                 argsrepr = json_dumps({
@@ -140,6 +137,8 @@
                     'user_id': user_id
                 })
 
+                time_limit = settings.dynamic_settings.query_time_limit(scheduled_query, user_id, data_source.org_id)
+
                 result = execute_query.apply_async(args=args,
                                                    argsrepr=argsrepr,
                                                    queue=queue_name,
diff --git a/tests/tasks/test_queries.py b/tests/tasks/test_queries.py
index c90e87a8f5..758d6e5402 100644
--- a/tests/tasks/test_queries.py
+++ b/tests/tasks/test_queries.py
@@ -28,6 +28,16 @@ def test_multiple_enqueue_of_same_query(self):
 
         self.assertEqual(1, execute_query.apply_async.call_count)
 
+    @mock.patch('redash.settings.dynamic_settings.query_time_limit', return_value=60)
+    def test_limits_query_time(self, _):
+        query = self.factory.create_query()
+        execute_query.apply_async = mock.MagicMock(side_effect=gen_hash)
+
+        enqueue_query(query.query_text, query.data_source, query.user_id, False, query, {'Username': 'Arik', 'Query ID': query.id})
+
+        _, kwargs = execute_query.apply_async.call_args
+        self.assertEqual(60, kwargs.get('time_limit'))
+
     def test_multiple_enqueue_of_different_query(self):
         query = self.factory.create_query()
         execute_query.apply_async = mock.MagicMock(side_effect=gen_hash)
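
Note on usage (not part of the patch): the new redash.settings.dynamic_settings module is an override point. Pointing REDASH_DYNAMIC_SETTINGS_MODULE at your own module swaps in custom logic without patching Redash. Below is a minimal sketch of such an override module; the package path my_company.redash_dynamic_settings and the per-org limit values are hypothetical, while the query_time_limit(is_scheduled, user_id, org_id) signature, the REDASH_*_QUERY_TIME_LIMIT environment variables, and the int_or_none helper all come from this change.

# my_company/redash_dynamic_settings.py -- hypothetical override module.
# Enable it with: REDASH_DYNAMIC_SETTINGS_MODULE=my_company.redash_dynamic_settings
import os

from redash.settings.helpers import int_or_none

# Illustrative per-org caps (seconds); these values are assumptions, not Redash defaults.
ORG_TIME_LIMITS = {
    1: 300,  # cap every query from org 1 at 5 minutes
}


def query_time_limit(is_scheduled, user_id, org_id):
    # Same contract as redash.settings.dynamic_settings.query_time_limit:
    # return a limit in seconds, or None for no limit.
    if org_id in ORG_TIME_LIMITS:
        return ORG_TIME_LIMITS[org_id]

    if is_scheduled:
        return int_or_none(os.environ.get('REDASH_SCHEDULED_QUERY_TIME_LIMIT', None))

    return int_or_none(os.environ.get('REDASH_ADHOC_QUERY_TIME_LIMIT', None))

One detail worth noting: enqueue_query passes the scheduled_query object (or None) as the is_scheduled argument, so the override should only rely on its truthiness unless it knows the caller.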