diff --git a/bin/release_manager.py b/bin/release_manager.py
index 521a9b8b52..df00169da7 100644
--- a/bin/release_manager.py
+++ b/bin/release_manager.py
@@ -1,10 +1,10 @@
 from __future__ import print_function
 import os
 import sys
-import json
 import re
 import subprocess
 import requests
+import simplejson

 github_token = os.environ['GITHUB_TOKEN']
 auth = (github_token, 'x-oauth-basic')
@@ -17,7 +17,7 @@ def _github_request(method, path, params=None, headers={}):
         url = path

     if params is not None:
-        params = json.dumps(params)
+        params = simplejson.dumps(params)

     response = requests.request(method, url, data=params, auth=auth)
     return response
diff --git a/migrations/versions/969126bd800f_.py b/migrations/versions/969126bd800f_.py
index 55d98bd987..051b97e695 100644
--- a/migrations/versions/969126bd800f_.py
+++ b/migrations/versions/969126bd800f_.py
@@ -6,7 +6,7 @@

 """
 from __future__ import print_function
-import json
+import simplejson

 from alembic import op
 import sqlalchemy as sa
@@ -26,7 +26,7 @@ def upgrade():
     print("Updating dashboards position data:")
     for dashboard in Dashboard.query:
         print("  Updating dashboard: {}".format(dashboard.id))
-        layout = json.loads(dashboard.layout)
+        layout = simplejson.loads(dashboard.layout)

         print("    Building widgets map:")
         widgets = {}
@@ -47,14 +47,14 @@ def upgrade():
                 if widget is None:
                     continue

-                options = json.loads(widget.options) or {}
+                options = simplejson.loads(widget.options) or {}
                 options['position'] = {
                     "row": row_index,
                     "col": column_index * column_size,
                     "sizeX": column_size * widget.width
                 }

-                widget.options = json.dumps(options)
+                widget.options = simplejson.dumps(options)

                 db.session.add(widget)
diff --git a/old_migrations/0003_update_data_source_config.py b/old_migrations/0003_update_data_source_config.py
index 6a9aa239cb..592147daf5 100644
--- a/old_migrations/0003_update_data_source_config.py
+++ b/old_migrations/0003_update_data_source_config.py
@@ -1,5 +1,5 @@
 from __future__ import print_function
-import json
+import simplejson
 import jsonschema
 from jsonschema import ValidationError

@@ -15,7 +15,7 @@ def validate_configuration(query_runner_type, configuration_json):

     try:
         if isinstance(configuration_json, string_types):
-            configuration = json.loads(configuration_json)
+            configuration = simplejson.loads(configuration_json)
         else:
             configuration = configuration_json
         jsonschema.validate(configuration, query_runner_class.configuration_schema())
@@ -24,6 +24,7 @@ def validate_configuration(query_runner_type, configuration_json):

     return True

+
 def update(data_source):
     print("[%s] Old options: %s" % (data_source.name, data_source.options))

@@ -40,7 +41,7 @@ def update(data_source):
             if k == 'port':
                 configuration[k] = int(v)

-        data_source.options = json.dumps(configuration)
+        data_source.options = simplejson.dumps(configuration)

     elif data_source.type == 'mysql':
         mapping = {
@@ -55,10 +56,10 @@ def update(data_source):
         for value in values:
             k, v = value.split("=", 1)
             configuration[mapping[k]] = v
-        data_source.options = json.dumps(configuration)
+        data_source.options = simplejson.dumps(configuration)

     elif data_source.type == 'graphite':
-        old_config = json.loads(data_source.options)
+        old_config = simplejson.loads(data_source.options)

         configuration = {
             "url": old_config["url"]
@@ -70,13 +71,13 @@ def update(data_source):
         if "auth" in old_config:
             configuration['username'], configuration['password'] = old_config["auth"]

-        data_source.options = json.dumps(configuration)
+        data_source.options = simplejson.dumps(configuration)

     elif data_source.type == 'url':
-        data_source.options = json.dumps({"url": data_source.options})
+        data_source.options = simplejson.dumps({"url": data_source.options})

     elif data_source.type == 'script':
-        data_source.options = json.dumps({"path": data_source.options})
+        data_source.options = simplejson.dumps({"path": data_source.options})

     elif data_source.type == 'mongo':
         data_source.type = 'mongodb'
diff --git a/old_migrations/0011_migrate_bigquery_to_json.py b/old_migrations/0011_migrate_bigquery_to_json.py
index eaeed67bb4..18f53b8616 100644
--- a/old_migrations/0011_migrate_bigquery_to_json.py
+++ b/old_migrations/0011_migrate_bigquery_to_json.py
@@ -1,5 +1,5 @@
 from base64 import b64encode
-import json
+import simplejson

 from redash.models import DataSource

@@ -15,23 +15,23 @@ def convert_p12_to_pem(p12file):

     for ds in DataSource.select(DataSource.id, DataSource.type, DataSource.options):
         if ds.type == 'bigquery':
-            options = json.loads(ds.options)
+            options = simplejson.loads(ds.options)

             if 'jsonKeyFile' in options:
                 continue

             new_options = {
                 'projectId': options['projectId'],
-                'jsonKeyFile': b64encode(json.dumps({
+                'jsonKeyFile': b64encode(simplejson.dumps({
                     'client_email': options['serviceAccount'],
                     'private_key': convert_p12_to_pem(options['privateKey'])
                 }))
             }

-            ds.options = json.dumps(new_options)
+            ds.options = simplejson.dumps(new_options)
             ds.save(only=ds.dirty_fields)

         elif ds.type == 'google_spreadsheets':
-            options = json.loads(ds.options)
+            options = simplejson.loads(ds.options)

             if 'jsonKeyFile' in options:
                 continue
@@ -40,5 +40,5 @@ def convert_p12_to_pem(p12file):
                 'jsonKeyFile': b64encode(f.read())
             }

-            ds.options = json.dumps(new_options)
+            ds.options = simplejson.dumps(new_options)
             ds.save(only=ds.dirty_fields)
diff --git a/old_migrations/0013_update_counter_options.py b/old_migrations/0013_update_counter_options.py
index be5f77356c..14af2bff6c 100644
--- a/old_migrations/0013_update_counter_options.py
+++ b/old_migrations/0013_update_counter_options.py
@@ -1,11 +1,11 @@
 from __future__ import print_function
-import json
+import simplejson

 from redash import models

 if __name__ == '__main__':
     for vis in models.Visualization.select():
         if vis.type == 'COUNTER':
-            options = json.loads(vis.options)
+            options = simplejson.loads(vis.options)
             print("Before: ", options)
             if 'rowNumber' in options and options['rowNumber'] is not None:
                 options['rowNumber'] += 1
@@ -20,5 +20,5 @@
                 options['targetRowNumber'] = options['rowNumber']

             print("After: ", options)
-            vis.options = json.dumps(options)
+            vis.options = simplejson.dumps(options)
             vis.save()
diff --git a/redash/admin.py b/redash/admin.py
index 6e012e7c81..6ce02983bb 100644
--- a/redash/admin.py
+++ b/redash/admin.py
@@ -1,4 +1,3 @@
-import json
 from flask_admin import Admin
 from flask_admin.base import MenuLink
 from flask_admin.contrib.sqla import ModelView
@@ -8,6 +7,7 @@

 from redash import models
 from redash.permissions import require_super_admin
+from redash.utils import json_loads


 class ArrayListField(fields.Field):
@@ -30,7 +30,7 @@ class JSONTextAreaField(fields.TextAreaField):
     def process_formdata(self, valuelist):
         if valuelist:
             try:
-                json.loads(valuelist[0])
+                json_loads(valuelist[0])
             except ValueError:
                 raise ValueError(self.gettext(u'Invalid JSON'))
             self.data = valuelist[0]
diff --git a/redash/cli/__init__.py b/redash/cli/__init__.py
index b12417989b..316e426d5d 100644
--- a/redash/cli/__init__.py
+++ b/redash/cli/__init__.py
@@ -1,8 +1,6 @@
 from __future__ import print_function
-import json
-
-
 import click
+import simplejson
 from flask.cli import FlaskGroup, run_command
 from flask import current_app

@@ -44,7 +42,7 @@ def version():

 @manager.command()
 def status():
-    print(json.dumps(get_status(), indent=2))
+    print(simplejson.dumps(get_status(), indent=2))


 @manager.command()
diff --git a/redash/cli/data_sources.py b/redash/cli/data_sources.py
index 6ef186045e..4aaad37b5a 100644
--- a/redash/cli/data_sources.py
+++ b/redash/cli/data_sources.py
@@ -1,6 +1,5 @@
 from __future__ import print_function
 from sys import exit
-import json

 import click
 from flask.cli import AppGroup
@@ -10,6 +9,7 @@
 from redash import models
 from redash.query_runner import (get_configuration_schema_for_query_runner_type,
                                  query_runners)
+from redash.utils import json_loads
 from redash.utils.configuration import ConfigurationContainer

 manager = AppGroup(help="Data sources management commands.")
@@ -129,7 +129,7 @@ def new(name=None, type=None, options=None, organization='default'):

         options = ConfigurationContainer(options_obj, schema)
     else:
-        options = ConfigurationContainer(json.loads(options), schema)
+        options = ConfigurationContainer(json_loads(options), schema)

     if not options.is_valid():
         print("Error: invalid configuration.")
@@ -198,7 +198,7 @@ def edit(name, new_name=None, options=None, type=None, organization='default'):
         if options is not None:
             schema = get_configuration_schema_for_query_runner_type(
                 data_source.type)
-            options = json.loads(options)
+            options = json_loads(options)
             data_source.options.set_schema(schema)
             data_source.options.update(options)
diff --git a/redash/destinations/__init__.py b/redash/destinations/__init__.py
index 9587f15718..d3b7a5a727 100644
--- a/redash/destinations/__init__.py
+++ b/redash/destinations/__init__.py
@@ -1,5 +1,4 @@
 import logging
-import json

 logger = logging.getLogger(__name__)

@@ -55,7 +54,7 @@ def register(destination_class):
     global destinations
     if destination_class.enabled():
         logger.debug("Registering %s (%s) destinations.", destination_class.name(), destination_class.type())
-        destinations[destination_class.type()] = destination_class
+        destinations[destination_class.type()] = destination_class
     else:
         logger.warning("%s destination enabled but not supported, not registering. Either disable or install missing dependencies.", destination_class.name())
diff --git a/redash/destinations/chatwork.py b/redash/destinations/chatwork.py
index a513aec1fc..6b94b5f614 100644
--- a/redash/destinations/chatwork.py
+++ b/redash/destinations/chatwork.py
@@ -1,4 +1,3 @@
-import json
 import logging

 import requests
diff --git a/redash/destinations/hipchat.py b/redash/destinations/hipchat.py
index da9c55aaca..d8f904669f 100644
--- a/redash/destinations/hipchat.py
+++ b/redash/destinations/hipchat.py
@@ -1,9 +1,9 @@
-import json
 import logging
 import requests

 from redash.destinations import *
 from redash.models import Alert
+from redash.utils import json_dumps


 colors = {
@@ -46,7 +46,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
             'color': colors.get(new_state, 'green')
         }
         headers = {'Content-Type': 'application/json'}
-        response = requests.post(options['url'], data=json.dumps(data), headers=headers)
+        response = requests.post(options['url'], data=json_dumps(data), headers=headers)

         if response.status_code != 204:
             logging.error('Bad status code received from HipChat: %d', response.status_code)
diff --git a/redash/destinations/mattermost.py b/redash/destinations/mattermost.py
index 5b0a04062e..5dfb824f2c 100644
--- a/redash/destinations/mattermost.py
+++ b/redash/destinations/mattermost.py
@@ -1,8 +1,8 @@
-import json
 import logging
 import requests

 from redash.destinations import *
+from redash.utils import json_dumps


 class Mattermost(BaseDestination):
@@ -46,7 +46,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
         if options.get('channel'):
             payload['channel'] = options.get('channel')
         try:
-            resp = requests.post(options.get('url'), data=json.dumps(payload))
+            resp = requests.post(options.get('url'), data=json_dumps(payload))
             logging.warning(resp.text)
             if resp.status_code != 200:
diff --git a/redash/destinations/slack.py b/redash/destinations/slack.py
index a1be50ebc4..3ab31a3204 100644
--- a/redash/destinations/slack.py
+++ b/redash/destinations/slack.py
@@ -1,8 +1,8 @@
-import json
 import logging
 import requests

 from redash.destinations import *
+from redash.utils import json_dumps


 class Slack(BaseDestination):
@@ -58,7 +58,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
         else:
             text = alert.name + " went back to normal"
             color = "#27ae60"
-
+
         payload = {'attachments': [{'text': text, 'color': color, 'fields': fields}]}
         if options.get('username'):
             payload['username'] = options.get('username')
@@ -67,7 +67,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
         if options.get('channel'):
             payload['channel'] = options.get('channel')
         try:
-            resp = requests.post(options.get('url'), data=json.dumps(payload))
+            resp = requests.post(options.get('url'), data=json_dumps(payload))
             logging.warning(resp.text)
             if resp.status_code != 200:
                 logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))
diff --git a/redash/handlers/admin.py b/redash/handlers/admin.py
index c03d944f2b..4d6aa7fea8 100644
--- a/redash/handlers/admin.py
+++ b/redash/handlers/admin.py
@@ -1,14 +1,14 @@
-import json
-
 from flask import request
 from flask_login import login_required
+
 from redash import models, redis_connection
 from redash.handlers import routes
 from redash.handlers.base import json_response
 from redash.permissions import require_super_admin
 from redash.serializers import QuerySerializer
-from redash.tasks.queries import QueryTaskTracker
 from redash.tasks import record_event
+from redash.tasks.queries import QueryTaskTracker
+from redash.utils import json_loads


 @routes.route('/api/admin/queries/outdated', methods=['GET'])
@@ -16,7 +16,7 @@
 @login_required
 def outdated_queries():
     manager_status = redis_connection.hgetall('redash:status')
-    query_ids = json.loads(manager_status.get('query_ids', '[]'))
+    query_ids = json_loads(manager_status.get('query_ids', '[]'))

     if query_ids:
         outdated_queries = (
             models.Query.query.outerjoin(models.QueryResult)
diff --git a/redash/handlers/query_results.py b/redash/handlers/query_results.py
index 634d2b6359..85a954881a 100644
--- a/redash/handlers/query_results.py
+++ b/redash/handlers/query_results.py
@@ -1,16 +1,19 @@
 import logging
-import json
 import time

 import pystache
 from flask import make_response, request
 from flask_login import current_user
 from flask_restful import abort
-from redash import models, settings, utils
+from redash import models, settings
 from redash.tasks import QueryTask, record_event
 from redash.permissions import require_permission, not_view_only, has_access, require_access, view_only
 from redash.handlers.base import BaseResource, get_object_or_404
-from redash.utils import collect_query_parameters, collect_parameters_from_request, gen_query_hash
+from redash.utils import (collect_query_parameters,
+                          collect_parameters_from_request,
+                          gen_query_hash,
+                          json_dumps,
+                          utcnow)
 from redash.tasks.queries import enqueue_query


@@ -56,7 +59,7 @@ def run_query_sync(data_source, parameter_values, query_text, max_age=0):

         run_time = time.time() - started_at
         query_result, updated_query_ids = models.QueryResult.store_result(data_source.org_id, data_source,
                                                                           query_hash, query_text, data,
-                                                                          run_time, utils.utcnow())
+                                                                          run_time, utcnow())
         models.db.session.commit()
         return query_result
@@ -243,7 +246,7 @@ def get(self, query_id=None, query_result_id=None, filetype='json'):
             abort(404, message='No cached result found for this query.')

     def make_json_response(self, query_result):
-        data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
+        data = json_dumps({'query_result': query_result.to_dict()})
         headers = {'Content-Type': "application/json"}
         return make_response(data, 200, headers)
diff --git a/redash/handlers/visualizations.py b/redash/handlers/visualizations.py
index 9780c7c20e..35c9d6610c 100644
--- a/redash/handlers/visualizations.py
+++ b/redash/handlers/visualizations.py
@@ -1,5 +1,3 @@
-import json
-
 from flask import request

 from redash import models
@@ -7,6 +5,7 @@
 from redash.serializers import serialize_visualization
 from redash.permissions import (require_object_modify_permission,
                                 require_permission)
+from redash.utils import json_dumps


 class VisualizationListResource(BaseResource):
@@ -17,7 +16,7 @@ def post(self):
         query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop('query_id'), self.current_org)
         require_object_modify_permission(query, self.current_user)

-        kwargs['options'] = json.dumps(kwargs['options'])
+        kwargs['options'] = json_dumps(kwargs['options'])
         kwargs['query_rel'] = query

         vis = models.Visualization(**kwargs)
@@ -34,7 +33,7 @@ def post(self, visualization_id):
         kwargs = request.get_json(force=True)

         if 'options' in kwargs:
-            kwargs['options'] = json.dumps(kwargs['options'])
+            kwargs['options'] = json_dumps(kwargs['options'])

         kwargs.pop('id', None)
         kwargs.pop('query_id', None)
diff --git a/redash/handlers/webpack.py b/redash/handlers/webpack.py
index 4f0b3622db..fec0d72abb 100644
--- a/redash/handlers/webpack.py
+++ b/redash/handlers/webpack.py
@@ -1,5 +1,5 @@
 import os
-import json
+import simplejson
 from flask import url_for

 WEBPACK_MANIFEST_PATH = os.path.join(os.path.dirname(__file__), '../../client/dist/', 'asset-manifest.json')
@@ -14,7 +14,7 @@ def get_asset(path):
     if assets is None or app.debug:
         try:
             with open(WEBPACK_MANIFEST_PATH) as fp:
-                assets = json.load(fp)
+                assets = simplejson.load(fp)
         except IOError:
             app.logger.exception('Unable to load webpack manifest')
             assets = {}
diff --git a/redash/handlers/widgets.py b/redash/handlers/widgets.py
index 53886c1159..ef81d7cb88 100644
--- a/redash/handlers/widgets.py
+++ b/redash/handlers/widgets.py
@@ -1,12 +1,12 @@
-import json
-
 from flask import request
+
 from redash import models
 from redash.handlers.base import BaseResource
 from redash.serializers import serialize_widget
 from redash.permissions import (require_access,
                                 require_object_modify_permission,
                                 require_permission, view_only)
+from redash.utils import json_dumps


 class WidgetListResource(BaseResource):
@@ -27,7 +27,7 @@ def post(self):
         dashboard = models.Dashboard.get_by_id_and_org(widget_properties.pop('dashboard_id'), self.current_org)
         require_object_modify_permission(dashboard, self.current_user)

-        widget_properties['options'] = json.dumps(widget_properties['options'])
+        widget_properties['options'] = json_dumps(widget_properties['options'])
         widget_properties.pop('id', None)
         widget_properties['dashboard'] = dashboard

@@ -63,7 +63,7 @@ def post(self, widget_id):
         require_object_modify_permission(widget.dashboard, self.current_user)
         widget_properties = request.get_json(force=True)
         widget.text = widget_properties['text']
-        widget.options = json.dumps(widget_properties['options'])
+        widget.options = json_dumps(widget_properties['options'])
         models.db.session.commit()
         return serialize_widget(widget)
diff --git a/redash/metrics/celery.py b/redash/metrics/celery.py
index ef7ed68cf2..6760d96b68 100644
--- a/redash/metrics/celery.py
+++ b/redash/metrics/celery.py
@@ -1,12 +1,12 @@
 from __future__ import absolute_import
-import json
 import logging
 import socket
 import time

 from celery.signals import task_postrun, task_prerun

 from redash import settings, statsd_client
+from redash.utils import json_dumps

 tasks_start_time = {}

@@ -45,7 +45,7 @@ def task_postrun_handler(signal, sender, task_id, task, args, kwargs, retval, st
         normalized_task_name = task.name.replace('redash.tasks.', '').replace('.', '_')

         metric = "celery.task_runtime.{}".format(normalized_task_name)
-        logging.debug("metric=%s", json.dumps({'metric': metric, 'tags': tags, 'value': run_time}))
+        logging.debug("metric=%s", json_dumps({'metric': metric, 'tags': tags, 'value': run_time}))
         statsd_client.timing(metric_name(metric, tags), run_time)
         statsd_client.incr(metric_name('celery.task.{}.{}'.format(normalized_task_name, state), tags))
     except Exception:
diff --git a/redash/models.py b/redash/models.py
index d608248f09..7115ecd606 100644
--- a/redash/models.py
+++ b/redash/models.py
@@ -4,7 +4,6 @@
 import functools
 import hashlib
 import itertools
-import json
 import logging
 import time
 from functools import reduce
@@ -22,7 +21,7 @@
 from redash.metrics import database  # noqa: F401
 from redash.query_runner import (get_configuration_schema_for_query_runner_type,
                                  get_query_runner)
-from redash.utils import generate_token, json_dumps
+from redash.utils import generate_token, json_dumps, json_loads
 from redash.utils.configuration import ConfigurationContainer
 from redash.settings.organization import settings as org_settings

@@ -141,7 +140,7 @@ def process_bind_param(self, value, dialect):
     def process_result_value(self, value, dialect):
         if not value:
             return value
-        return json.loads(value)
+        return json_loads(value)


 class MutableDict(Mutable, dict):
@@ -648,9 +647,9 @@ def get_schema(self, refresh=False):
             query_runner = self.query_runner
             schema = sorted(query_runner.get_schema(get_stats=refresh), key=lambda t: t['name'])

-            redis_connection.set(key, json.dumps(schema))
+            redis_connection.set(key, json_dumps(schema))
         else:
-            schema = json.loads(cache)
+            schema = json_loads(cache)

         return schema

@@ -738,7 +737,7 @@ def to_dict(self):
             'id': self.id,
             'query_hash': self.query_hash,
             'query': self.query_text,
-            'data': json.loads(self.data),
+            'data': json_loads(self.data),
             'data_source_id': self.data_source_id,
             'runtime': self.runtime,
             'retrieved_at': self.retrieved_at
@@ -807,7 +806,7 @@ def groups(self):

     def make_csv_content(self):
         s = cStringIO.StringIO()
-        query_data = json.loads(self.data)
+        query_data = json_loads(self.data)

         writer = csv.DictWriter(s, extrasaction="ignore", fieldnames=[col['name'] for col in query_data['columns']])
         writer.writer = utils.UnicodeWriter(s)
         writer.writeheader()
@@ -819,7 +818,7 @@ def make_csv_content(self):

     def make_excel_content(self):
         s = cStringIO.StringIO()
-        query_data = json.loads(self.data)
+        query_data = json_loads(self.data)
         book = xlsxwriter.Workbook(s, {'constant_memory': True})
         sheet = book.add_worksheet("result")

@@ -1297,7 +1296,7 @@ def get_by_id_and_org(cls, id, org):
         return db.session.query(Alert).join(Query).filter(Alert.id == id, Query.org == org).one()

     def evaluate(self):
-        data = json.loads(self.query_rel.latest_query_data.data)
+        data = json_loads(self.query_rel.latest_query_data.data)

         if data['rows']:
             value = data['rows'][0][self.options['column']]
             op = self.options['op']
diff --git a/redash/query_runner/__init__.py b/redash/query_runner/__init__.py
index bdf8728599..65b44a0fdd 100644
--- a/redash/query_runner/__init__.py
+++ b/redash/query_runner/__init__.py
@@ -1,10 +1,8 @@
 import logging
-import json
-import sys
-
 import requests

 from redash import settings
+from redash.utils import json_loads

 logger = logging.getLogger(__name__)

@@ -115,7 +113,7 @@ def _run_query_internal(self, query):
         if error is not None:
             raise Exception("Failed running query [%s]." % query)
-        return json.loads(results)['rows']
+        return json_loads(results)['rows']

     @classmethod
     def to_dict(cls):
diff --git a/redash/query_runner/athena.py b/redash/query_runner/athena.py
index a07952934f..075a17aaba 100644
--- a/redash/query_runner/athena.py
+++ b/redash/query_runner/athena.py
@@ -1,11 +1,9 @@
-import json
 import logging
 import os

-import simplejson
 from redash.query_runner import *
 from redash.settings import parse_boolean
-from redash.utils import SimpleJSONEncoder
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)
 ANNOTATE_QUERY = parse_boolean(os.environ.get('ATHENA_ANNOTATE_QUERY', 'true'))
@@ -152,7 +150,7 @@ def get_schema(self, get_stats=False):
         if error is not None:
             raise Exception("Failed getting schema.")

-        results = json.loads(results)
+        results = json_loads(results)
         for row in results['rows']:
             table_name = '{0}.{1}'.format(row['table_schema'], row['table_name'])
             if table_name not in schema:
@@ -195,7 +193,7 @@ def run_query(self, query, user):
                     'athena_query_id': athena_query_id
                 }
             }
-            json_data = simplejson.dumps(data, ignore_nan=True, cls=SimpleJSONEncoder)
+            json_data = json_dumps(data, ignore_nan=True)
             error = None
         except KeyboardInterrupt:
             if cursor.query_id:
diff --git a/redash/query_runner/axibase_tsd.py b/redash/query_runner/axibase_tsd.py
index afa9dc89e9..78f533fdbf 100644
--- a/redash/query_runner/axibase_tsd.py
+++ b/redash/query_runner/axibase_tsd.py
@@ -1,12 +1,11 @@
 from io import StringIO
-import json
 import logging
 import sys
 import uuid
 import csv

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -59,7 +58,7 @@ def generate_rows_and_columns(csv_response):
     meta_with_padding = meta + '=' * (4 - len(meta) % 4)
     meta_decoded = meta_with_padding.decode('base64')
-    meta_json = json.loads(meta_decoded)
+    meta_json = json_loads(meta_decoded)
     meta_columns = meta_json['tableSchema']['columns']

     reader = csv.reader(data.splitlines())
@@ -162,7 +161,7 @@ def run_query(self, query, user):
             columns, rows = generate_rows_and_columns(data)

             data = {'columns': columns, 'rows': rows}
-            json_data = json.dumps(data, cls=JSONEncoder)
+            json_data = json_dumps(data)
             error = None
         except SQLException as e:
diff --git a/redash/query_runner/big_query.py b/redash/query_runner/big_query.py
index 1d486f1a9f..fa2595ce8b 100644
--- a/redash/query_runner/big_query.py
+++ b/redash/query_runner/big_query.py
@@ -1,5 +1,4 @@
 import datetime
-import json
 import logging
 import sys
 import time
@@ -10,7 +9,7 @@

 from redash import settings
 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -141,7 +140,7 @@ def _get_bigquery_service(self):
             "https://www.googleapis.com/auth/drive"
         ]

-        key = json.loads(b64decode(self.configuration['jsonKeyFile']))
+        key = json_loads(b64decode(self.configuration['jsonKeyFile']))

         creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
         http = httplib2.Http(timeout=settings.BIGQUERY_HTTP_TIMEOUT)
@@ -296,11 +295,11 @@ def run_query(self, query, user):
                 data = self._get_query_result(jobs, query)
                 error = None

-                json_data = json.dumps(data, cls=JSONEncoder)
+                json_data = json_dumps(data)
             except apiclient.errors.HttpError as e:
                 json_data = None
                 if e.resp.status == 400:
-                    error = json.loads(e.content)['error']['message']
+                    error = json_loads(e.content)['error']['message']
                 else:
                     error = e.content
             except KeyboardInterrupt:
diff --git a/redash/query_runner/cass.py b/redash/query_runner/cass.py
index 37bcc98cbb..0f0c72ff66 100644
--- a/redash/query_runner/cass.py
+++ b/redash/query_runner/cass.py
@@ -1,9 +1,7 @@
-import json
 import logging
-import uuid

 from redash.query_runner import BaseQueryRunner, register
-from redash.utils import JSONEncoder
+from redash.utils import JSONEncoder, json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -18,8 +16,6 @@

 class CassandraJSONEncoder(JSONEncoder):
     def default(self, o):
-        if isinstance(o, uuid.UUID):
-            return str(o)
         if isinstance(o, sortedset):
             return list(o)
         return super(CassandraJSONEncoder, self).default(o)
@@ -79,7 +75,7 @@ def get_schema(self, get_stats=False):
         select release_version from system.local;
         """
         results, error = self.run_query(query, None)
-        results = json.loads(results)
+        results = json_loads(results)
         release_version = results['rows'][0]['release_version']

         query = """
@@ -96,7 +92,7 @@ def get_schema(self, get_stats=False):
         """.format(self.configuration['keyspace'])

         results, error = self.run_query(query, None)
-        results = json.loads(results)
+        results = json_loads(results)

         schema = {}
         for row in results['rows']:
@@ -135,7 +131,7 @@ def run_query(self, query, user):
             rows = [dict(zip(column_names, row)) for row in result]

             data = {'columns': columns, 'rows': rows}
-            json_data = json.dumps(data, cls=CassandraJSONEncoder)
+            json_data = json_dumps(data, cls=CassandraJSONEncoder)

             error = None
         except KeyboardInterrupt:
diff --git a/redash/query_runner/clickhouse.py b/redash/query_runner/clickhouse.py
index 00fc2b578b..7f05adccd6 100644
--- a/redash/query_runner/clickhouse.py
+++ b/redash/query_runner/clickhouse.py
@@ -1,9 +1,11 @@
-import json
 import logging
-from redash.query_runner import *
-from redash.utils import JSONEncoder
-import requests
 import re
+
+import requests
+
+from redash.query_runner import *
+from redash.utils import json_dumps, json_loads
+
 logger = logging.getLogger(__name__)

@@ -47,7 +49,7 @@ def _get_tables(self, schema):
         if error is not None:
             raise Exception("Failed getting schema.")

-        results = json.loads(results)
+        results = json_loads(results)

         for row in results['rows']:
             table_name = '{}.{}'.format(row['database'], row['table'])
@@ -107,7 +109,7 @@ def run_query(self, query, user):
             return json_data, error
         try:
             q = self._clickhouse_query(query)
-            data = json.dumps(q, cls=JSONEncoder)
+            data = json_dumps(q)
             error = None
         except Exception as e:
             data = None
diff --git a/redash/query_runner/dynamodb_sql.py b/redash/query_runner/dynamodb_sql.py
index 22eb950f66..5f7c8f09d8 100644
--- a/redash/query_runner/dynamodb_sql.py
+++ b/redash/query_runner/dynamodb_sql.py
@@ -1,9 +1,8 @@
-import json
 import logging
 import sys

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -119,7 +118,7 @@ def run_query(self, query, user):
                     rows.append(item)

             data = {'columns': columns, 'rows': rows}
-            json_data = json.dumps(data, cls=JSONEncoder)
+            json_data = json_dumps(data)
             error = None
         except ParseException as e:
             error = u"Error parsing query at line {} (column {}):\n{}".format(e.lineno, e.column, e.line)
diff --git a/redash/query_runner/elasticsearch.py b/redash/query_runner/elasticsearch.py
index 07e2412cfe..e9327e504a 100644
--- a/redash/query_runner/elasticsearch.py
+++ b/redash/query_runner/elasticsearch.py
@@ -3,10 +3,10 @@
 import urllib

 import requests
-import simplejson as json
 from requests.auth import HTTPBasicAuth

 from redash.query_runner import *
+from redash.utils import json_dumps, json_loads

 try:
     import http.client as http_client
@@ -315,7 +315,7 @@ def run_query(self, query, user):
             error = None

             logger.debug(query)
-            query_params = json.loads(query)
+            query_params = json_loads(query)

             index_name = query_params["index"]
             query_data = query_params["query"]
@@ -334,7 +334,6 @@ def run_query(self, query, user):
             mappings, error = self._get_query_mappings(mapping_url)
             if error:
                 return None, error
-            #logger.debug(json.dumps(mappings, indent=4))

             if sort:
                 url += "&sort={0}".format(urllib.quote_plus(sort))
@@ -358,7 +357,7 @@ def run_query(self, query, user):
                 # TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
                 raise Exception("Advanced queries are not supported")

-            json_data = json.dumps({
+            json_data = json_dumps({
                 "columns": result_columns,
                 "rows": result_rows
             })
@@ -396,7 +395,7 @@ def run_query(self, query, user):
             error = None

             logger.debug(query)
-            query_dict = json.loads(query)
+            query_dict = json_loads(query)

             index_name = query_dict.pop("index", "")
             result_fields = query_dict.pop("result_fields", None)
@@ -422,7 +421,7 @@ def run_query(self, query, user):
             result_rows = []
             self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)

-            json_data = json.dumps({
+            json_data = json_dumps({
                 "columns": result_columns,
                 "rows": result_rows
             })
diff --git a/redash/query_runner/google_analytics.py b/redash/query_runner/google_analytics.py
index cd14724b66..e8b70eb01f 100644
--- a/redash/query_runner/google_analytics.py
+++ b/redash/query_runner/google_analytics.py
@@ -1,13 +1,12 @@
 # -*- coding: utf-8 -*-

-import json
 import logging
 from base64 import b64decode
 from datetime import datetime
 from urlparse import parse_qs, urlparse

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -115,7 +114,7 @@ def __init__(self, configuration):

     def _get_analytics_service(self):
         scope = ['https://www.googleapis.com/auth/analytics.readonly']
-        key = json.loads(b64decode(self.configuration['jsonKeyFile']))
+        key = json_loads(b64decode(self.configuration['jsonKeyFile']))
         creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
         return build('analytics', 'v3', http=creds.authorize(httplib2.Http()))

@@ -147,7 +146,7 @@ def test_connection(self):
     def run_query(self, query, user):
         logger.debug("Analytics is about to execute query: %s", query)
         try:
-            params = json.loads(query)
+            params = json_loads(query)
         except:
             params = parse_qs(urlparse(query).query, keep_blank_values=True)
             for key in params.keys():
@@ -171,7 +170,7 @@ def run_query(self, query, user):
             response = api.get(**params).execute()
             data = parse_ga_response(response)
             error = None
-            json_data = json.dumps(data, cls=JSONEncoder)
+            json_data = json_dumps(data)
         except HttpError as e:
             # Make sure we return a more readable error to the end user
             error = e._get_reason()
diff --git a/redash/query_runner/google_spreadsheets.py b/redash/query_runner/google_spreadsheets.py
index 61b7a62d5d..c180f1d8e1 100644
--- a/redash/query_runner/google_spreadsheets.py
+++ b/redash/query_runner/google_spreadsheets.py
@@ -1,4 +1,3 @@
-import json
 import logging
 from base64 import b64decode

@@ -7,7 +6,7 @@
 from xlsxwriter.utility import xl_col_to_name

 from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -23,7 +22,7 @@

 def _load_key(filename):
     with open(filename, "rb") as f:
-        return json.loads(f.read())
+        return json_loads(f.read())


 def _get_columns_and_column_names(row):
@@ -179,7 +178,7 @@ def _get_spreadsheet_service(self):
             'https://spreadsheets.google.com/feeds',
         ]

-        key = json.loads(b64decode(self.configuration['jsonKeyFile']))
+        key = json_loads(b64decode(self.configuration['jsonKeyFile']))
         creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)

         timeout_session = HTTPSession()
diff --git a/redash/query_runner/graphite.py b/redash/query_runner/graphite.py
index 023ec04940..6b394e81ec 100644
--- a/redash/query_runner/graphite.py
+++ b/redash/query_runner/graphite.py
@@ -1,9 +1,10 @@
-import json
 import datetime
-import requests
 import logging
+
+import requests
+
 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -21,7 +22,7 @@ def _transform_result(response):
             rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})

     data = {'columns': columns, 'rows': rows}
-    return json.dumps(data, cls=JSONEncoder)
+    return json_dumps(data)


 class Graphite(BaseQueryRunner):
diff --git a/redash/query_runner/hive_ds.py b/redash/query_runner/hive_ds.py
index 613b6668ed..746ce1485c 100644
--- a/redash/query_runner/hive_ds.py
+++ b/redash/query_runner/hive_ds.py
@@ -1,10 +1,9 @@
-import json
 import logging
 import sys
 import base64

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -167,7 +166,7 @@ def run_query(self, query, user):
                 rows = [dict(zip(column_names, row)) for row in cursor]

             data = {'columns': columns, 'rows': rows}
-            json_data = json.dumps(data, cls=JSONEncoder)
+            json_data = json_dumps(data)
             error = None
         except KeyboardInterrupt:
             connection.cancel()
diff --git a/redash/query_runner/impala_ds.py b/redash/query_runner/impala_ds.py
index 0f412ffac6..5b8b590777 100644
--- a/redash/query_runner/impala_ds.py
+++ b/redash/query_runner/impala_ds.py
@@ -1,9 +1,7 @@
-import json
 import logging
-import sys

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -118,7 +116,7 @@ def run_query(self, query, user):
             rows = [dict(zip(column_names, row)) for row in cursor]

             data = {'columns': columns, 'rows': rows}
-            json_data = json.dumps(data, cls=JSONEncoder)
+            json_data = json_dumps(data)
             error = None
             cursor.close()
         except DatabaseError as e:
diff --git a/redash/query_runner/influx_db.py b/redash/query_runner/influx_db.py
index 5d830daa46..47f3a4201f 100644
--- a/redash/query_runner/influx_db.py
+++ b/redash/query_runner/influx_db.py
@@ -1,8 +1,7 @@
-import json
 import logging

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -42,10 +41,10 @@ def _transform_result(results):
                 result_row[column] = value
             result_rows.append(result_row)

-    return json.dumps({
+    return json_dumps({
         "columns": [{'name': c} for c in result_columns],
         "rows": result_rows
-    }, cls=JSONEncoder)
+    })


 class InfluxDB(BaseQueryRunner):
diff --git a/redash/query_runner/jql.py b/redash/query_runner/jql.py
index 9beb7c53ae..2022f8f13a 100644
--- a/redash/query_runner/jql.py
+++ b/redash/query_runner/jql.py
@@ -1,9 +1,8 @@
-import json
 import re
-
 from collections import OrderedDict

 from redash.query_runner import *
+from redash.utils import json_dumps, json_loads


 # TODO: make this more general and move into __init__.py
@@ -23,7 +22,7 @@ def add_column(self, column, column_type=TYPE_STRING):
             self.columns[column] = {'name': column, 'type': column_type, 'friendly_name': column}

     def to_json(self):
-        return json.dumps({'rows': self.rows, 'columns': self.columns.values()})
+        return json_dumps({'rows': self.rows, 'columns': self.columns.values()})


 def parse_issue(issue, field_mapping):
@@ -39,7 +38,7 @@ def parse_issue(issue, field_mapping):
                 # if field mapping with dict member mappings defined get value of each member
                 for member_name in member_names:
                     if member_name in v:
-                        result[field_mapping.get_dict_output_field_name(k,member_name)] = v[member_name]
+                        result[field_mapping.get_dict_output_field_name(k, member_name)] = v[member_name]

             else:
                 # these special mapping rules are kept for backwards compatibility
@@ -64,7 +63,7 @@ def parse_issue(issue, field_mapping):
                             if member_name in listItem:
                                 listValues.append(listItem[member_name])
                     if len(listValues) > 0:
-                        result[field_mapping.get_dict_output_field_name(k,member_name)] = ','.join(listValues)
+                        result[field_mapping.get_dict_output_field_name(k, member_name)] = ','.join(listValues)

             else:
                 # otherwise support list values only for non-dict items
@@ -160,7 +159,7 @@ def run_query(self, query, user):
         jql_url = '{}/rest/api/2/search'.format(self.configuration["url"])

         try:
-            query = json.loads(query)
+            query = json_loads(query)
             query_type = query.pop('queryType', 'select')
             field_mapping = FieldMapping(query.pop('fieldMapping', {}))
diff --git a/redash/query_runner/mapd.py b/redash/query_runner/mapd.py
index 48c72b207f..84ed2e480a 100644
--- a/redash/query_runner/mapd.py
+++ b/redash/query_runner/mapd.py
@@ -1,6 +1,4 @@
 from __future__ import absolute_import
-import sys
-import json

 try:
     import pymapd
diff --git a/redash/query_runner/memsql_ds.py b/redash/query_runner/memsql_ds.py
index 06058d8fe0..bbec2836d4 100644
--- a/redash/query_runner/memsql_ds.py
+++ b/redash/query_runner/memsql_ds.py
@@ -1,9 +1,8 @@
-import json
 import logging
 import sys

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -131,7 +130,7 @@ def run_query(self, query, user):
                     })

             data = {'columns': columns, 'rows': rows}
-            json_data = json.dumps(data, cls=JSONEncoder)
+            json_data = json_dumps(data)
             error = None
         except KeyboardInterrupt:
             cursor.close()
diff --git a/redash/query_runner/mongodb.py b/redash/query_runner/mongodb.py
index 91323a8a19..2bb1e71358 100644
--- a/redash/query_runner/mongodb.py
+++ b/redash/query_runner/mongodb.py
@@ -1,12 +1,11 @@
 import datetime
-import json
 import logging
 import re

 from dateutil.parser import parse

 from redash.query_runner import *
-from redash.utils import JSONEncoder, parse_human_time
+from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time

 logger = logging.getLogger(__name__)

@@ -70,7 +69,7 @@ def datetime_parser(dct):


 def parse_query_json(query):
-    query_data = json.loads(query, object_hook=datetime_parser)
+    query_data = json_loads(query, object_hook=datetime_parser)
     return query_data


@@ -312,7 +311,7 @@ def run_query(self, query, user):
             "rows": rows
         }
         error = None
-        json_data = json.dumps(data, cls=MongoDBJSONEncoder)
+        json_data = json_dumps(data, cls=MongoDBJSONEncoder)

         return json_data, error
diff --git a/redash/query_runner/mssql.py b/redash/query_runner/mssql.py
index aa017ef7f5..007aa825b6 100644
--- a/redash/query_runner/mssql.py
+++ b/redash/query_runner/mssql.py
@@ -1,10 +1,9 @@
-import json
 import logging
 import sys
 import uuid

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -26,13 +25,6 @@
 }


-class MSSQLJSONEncoder(JSONEncoder):
-    def default(self, o):
-        if isinstance(o, uuid.UUID):
-            return str(o)
-        return super(MSSQLJSONEncoder, self).default(o)
-
-
 class SqlServer(BaseSQLQueryRunner):
     noop_query = "SELECT 1"

@@ -105,7 +97,7 @@ def _get_tables(self, schema):
         if error is not None:
             raise Exception("Failed getting schema.")

-        results = json.loads(results)
+        results = json_loads(results)

         for row in results['rows']:
             if row['table_schema'] != self.configuration['db']:
@@ -151,7 +143,7 @@ def run_query(self, query, user):
                 rows = [dict(zip((c['name'] for c in columns), row)) for row in data]

                 data = {'columns': columns, 'rows': rows}
-                json_data = json.dumps(data, cls=MSSQLJSONEncoder)
+                json_data = json_dumps(data)
                 error = None
             else:
                 error = "No data was returned."
diff --git a/redash/query_runner/mssql_odbc.py b/redash/query_runner/mssql_odbc.py
index 43b8dddd2a..9be2278f22 100644
--- a/redash/query_runner/mssql_odbc.py
+++ b/redash/query_runner/mssql_odbc.py
@@ -1,11 +1,10 @@
-import json
 import logging
 import sys
 import uuid

 from redash.query_runner import *
-from redash.utils import JSONEncoder
-from redash.query_runner.mssql import MSSQLJSONEncoder, types_map
+from redash.query_runner.mssql import types_map
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -88,7 +87,7 @@ def _get_tables(self, schema):
         if error is not None:
             raise Exception("Failed getting schema.")

-        results = json.loads(results)
+        results = json_loads(results)

         for row in results['rows']:
             if row['table_schema'] != self.configuration['db']:
@@ -133,7 +132,7 @@ def run_query(self, query, user):
                 rows = [dict(zip((c['name'] for c in columns), row)) for row in data]

                 data = {'columns': columns, 'rows': rows}
-                json_data = json.dumps(data, cls=MSSQLJSONEncoder)
+                json_data = json_dumps(data)
                 error = None
             else:
                 error = "No data was returned."
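[Editor's note — illustrative sketch, not part of the patch. Every call site in this diff routes through the `json_dumps`/`json_loads` wrappers introduced in `redash/utils/__init__.py` further down; the snippet below shows the behavior those wrappers are expected to have once the patch is applied. The sample values are hypothetical.]

    # Illustrative only -- assumes the wrappers defined later in this diff.
    import datetime
    import decimal
    import uuid

    from redash.utils import json_dumps, json_loads

    # The simplejson-based JSONEncoder serializes types the stdlib encoder rejects:
    json_dumps({
        'amount': decimal.Decimal('12.50'),          # -> 12.5 (float)
        'id': uuid.uuid4(),                          # -> str(...)
        'retrieved_at': datetime.date(2018, 4, 19),  # -> "2018-04-19" (isoformat)
    })

    # Extra keyword arguments pass straight through to simplejson.dumps; the
    # Athena runner relies on this for ignore_nan=True (NaN -> null):
    json_dumps({'value': float('nan')}, ignore_nan=True)

    # json_loads is a thin pass-through to simplejson.loads, so kwargs such as
    # object_hook (used by the MongoDB runner's datetime_parser) keep working:
    assert json_loads('{"rows": []}') == {'rows': []}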
diff --git a/redash/query_runner/mysql.py b/redash/query_runner/mysql.py index 2565c67dea..e5f8d54803 100644 --- a/redash/query_runner/mysql.py +++ b/redash/query_runner/mysql.py @@ -1,10 +1,9 @@ -import json import logging import os from redash.query_runner import * from redash.settings import parse_boolean -from redash.utils import JSONEncoder +from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) types_map = { @@ -111,7 +110,7 @@ def _get_tables(self, schema): if error is not None: raise Exception("Failed getting schema.") - results = json.loads(results) + results = json_loads(results) for row in results['rows']: if row['table_schema'] != self.configuration['db']: @@ -154,7 +153,7 @@ def run_query(self, query, user): rows = [dict(zip((c['name'] for c in columns), row)) for row in data] data = {'columns': columns, 'rows': rows} - json_data = json.dumps(data, cls=JSONEncoder) + json_data = json_dumps(data) error = None else: json_data = None diff --git a/redash/query_runner/oracle.py b/redash/query_runner/oracle.py index 5bb8f70f2f..eff9250042 100644 --- a/redash/query_runner/oracle.py +++ b/redash/query_runner/oracle.py @@ -1,9 +1,7 @@ -import json import logging -import sys +from redash.utils import json_dumps, json_loads from redash.query_runner import * -from redash.utils import JSONEncoder try: import cx_Oracle @@ -100,7 +98,7 @@ def _get_tables(self, schema): if error is not None: raise Exception("Failed getting schema.") - results = json.loads(results) + results = json_loads(results) for row in results['rows']: if row['OWNER'] != None: @@ -148,13 +146,13 @@ def run_query(self, query, user): rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor] data = {'columns': columns, 'rows': rows} error = None - json_data = json.dumps(data, cls=JSONEncoder) + json_data = json_dumps(data) else: columns = [{'name': 'Row(s) Affected', 'type': 'TYPE_INTEGER'}] rows = [{'Row(s) Affected': rows_count}] data = {'columns': columns, 'rows': rows} - json_data = json.dumps(data, cls=JSONEncoder) - connection.commit() + json_data = json_dumps(data) + connection.commit() except cx_Oracle.DatabaseError as err: error = u"Query failed. {}.".format(err.message) json_data = None diff --git a/redash/query_runner/pg.py b/redash/query_runner/pg.py index 3ea2bbcc30..b20ddc6166 100644 --- a/redash/query_runner/pg.py +++ b/redash/query_runner/pg.py @@ -1,12 +1,11 @@ import os -import json import logging import select import psycopg2 from redash.query_runner import * -from redash.utils import JSONEncoder +from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -92,7 +91,7 @@ def _get_definitions(self, schema, query): if error is not None: raise Exception("Failed getting schema.") - results = json.loads(results) + results = json_loads(results) for row in results['rows']: if row['table_schema'] != 'public': @@ -166,7 +165,7 @@ def run_query(self, query, user): data = {'columns': columns, 'rows': rows} error = None - json_data = json.dumps(data, cls=JSONEncoder) + json_data = json_dumps(data) else: error = 'Query completed but it returned no data.' 
json_data = None diff --git a/redash/query_runner/presto.py b/redash/query_runner/presto.py index e915fa9a2e..bef714dfa3 100644 --- a/redash/query_runner/presto.py +++ b/redash/query_runner/presto.py @@ -1,7 +1,5 @@ -import json - -from redash.utils import JSONEncoder from redash.query_runner import * +from redash.utils import json_dumps, json_loads import logging logger = logging.getLogger(__name__) @@ -79,7 +77,7 @@ def get_schema(self, get_stats=False): if error is not None: raise Exception("Failed getting schema.") - results = json.loads(results) + results = json_loads(results) for row in results['rows']: table_name = '{}.{}'.format(row['table_schema'], row['table_name']) @@ -108,7 +106,7 @@ def run_query(self, query, user): columns = self.fetch_columns(column_tuples) rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())] data = {'columns': columns, 'rows': rows} - json_data = json.dumps(data, cls=JSONEncoder) + json_data = json_dumps(data) error = None except DatabaseError as db: json_data = None diff --git a/redash/query_runner/python.py b/redash/query_runner/python.py index f8e69f96ce..51f475fba9 100644 --- a/redash/query_runner/python.py +++ b/redash/query_runner/python.py @@ -1,19 +1,17 @@ import datetime -import json +import importlib import logging import sys from redash.query_runner import * -from redash.utils import json_dumps +from redash.utils import json_dumps, json_loads from redash import models +from RestrictedPython import compile_restricted +from RestrictedPython.Guards import safe_builtins -import importlib logger = logging.getLogger(__name__) -from RestrictedPython import compile_restricted -from RestrictedPython.Guards import safe_builtins - class CustomPrint(object): """CustomPrint redirect "print" calls to be sent as "log" on the result object.""" @@ -173,8 +171,8 @@ def execute_query(data_source_name_or_id, query): if error is not None: raise Exception(error) - # TODO: allow avoiding the json.dumps/loads in same process - return json.loads(data) + # TODO: allow avoiding the JSON dumps/loads in same process + return json_loads(data) @staticmethod def get_source_schema(data_source_name_or_id): @@ -211,7 +209,7 @@ def get_query_result(query_id): if query.latest_query_data.data is None: raise Exception("Query does not have results yet.") - return json.loads(query.latest_query_data.data) + return json_loads(query.latest_query_data.data) def test_connection(self): pass diff --git a/redash/query_runner/query_results.py b/redash/query_runner/query_results.py index 97fa7b39eb..4766267d33 100644 --- a/redash/query_runner/query_results.py +++ b/redash/query_runner/query_results.py @@ -1,4 +1,3 @@ -import json import logging import numbers import re @@ -12,7 +11,7 @@ from redash.query_runner import (TYPE_BOOLEAN, TYPE_DATETIME, TYPE_FLOAT, TYPE_INTEGER, TYPE_STRING, BaseQueryRunner, register) -from redash.utils import JSONEncoder +from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -73,13 +72,12 @@ def get_query_results(user, query_id, bring_from_cache): results = query.latest_query_data.data else: raise Exception("No cached result available for query {}.".format(query.id)) - - else: + else: results, error = query.data_source.query_runner.run_query(query.query_text, user) if error: raise Exception("Failed loading results for query id {}.".format(query.id)) - return json.loads(results) + return json_loads(results) def create_tables_from_query_ids(user, connection, query_ids, cached_query_ids=[]): @@ 
-170,7 +168,7 @@ def run_query(self, query, user): data = {'columns': columns, 'rows': rows} error = None - json_data = json.dumps(data, cls=JSONEncoder) + json_data = json_dumps(data) else: error = 'Query completed but it returned no data.' json_data = None diff --git a/redash/query_runner/snowflake.py b/redash/query_runner/snowflake.py index a1a7ca447e..3bf2bd64aa 100644 --- a/redash/query_runner/snowflake.py +++ b/redash/query_runner/snowflake.py @@ -1,5 +1,4 @@ from __future__ import absolute_import -import json try: import snowflake.connector @@ -10,7 +9,7 @@ from redash.query_runner import BaseQueryRunner, register from redash.query_runner import TYPE_STRING, TYPE_DATE, TYPE_DATETIME, TYPE_INTEGER, TYPE_FLOAT, TYPE_BOOLEAN -from redash.utils import json_dumps +from redash.utils import json_dumps, json_loads TYPES_MAP = { 0: TYPE_INTEGER, @@ -98,7 +97,7 @@ def get_schema(self, get_stats=False): raise Exception("Failed getting schema.") schema = {} - results = json.loads(results) + results = json_loads(results) for row in results['rows']: table_name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME']) diff --git a/redash/query_runner/sqlite.py b/redash/query_runner/sqlite.py index ab54210915..c1933d81e6 100644 --- a/redash/query_runner/sqlite.py +++ b/redash/query_runner/sqlite.py @@ -1,14 +1,11 @@ -import json import logging import sqlite3 import sys from six import reraise -from redash.query_runner import BaseSQLQueryRunner -from redash.query_runner import register - -from redash.utils import JSONEncoder +from redash.query_runner import BaseSQLQueryRunner, register +from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -47,7 +44,7 @@ def _get_tables(self, schema): if error is not None: raise Exception("Failed getting schema.") - results = json.loads(results) + results = json_loads(results) for row in results['rows']: table_name = row['tbl_name'] @@ -56,7 +53,7 @@ def _get_tables(self, schema): if error is not None: raise Exception("Failed getting schema.") - results_table = json.loads(results_table) + results_table = json_loads(results_table) for row_column in results_table['rows']: schema[table_name]['columns'].append(row_column['name']) @@ -76,7 +73,7 @@ def run_query(self, query, user): data = {'columns': columns, 'rows': rows} error = None - json_data = json.dumps(data, cls=JSONEncoder) + json_data = json_dumps(data) else: error = 'Query completed but it returned no data.' 
json_data = None diff --git a/redash/query_runner/treasuredata.py b/redash/query_runner/treasuredata.py index 0ecfacd25d..927410074c 100644 --- a/redash/query_runner/treasuredata.py +++ b/redash/query_runner/treasuredata.py @@ -1,8 +1,7 @@ -import json import logging from redash.query_runner import * -from redash.utils import JSONEncoder +from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -113,7 +112,7 @@ def run_query(self, query, user): else: rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())] data = {'columns': columns, 'rows': rows} - json_data = json.dumps(data, cls=JSONEncoder) + json_data = json_dumps(data) error = None except errors.InternalError as e: json_data = None diff --git a/redash/query_runner/vertica.py b/redash/query_runner/vertica.py index 96a09d49c3..4a3ddda1c2 100644 --- a/redash/query_runner/vertica.py +++ b/redash/query_runner/vertica.py @@ -1,8 +1,7 @@ import sys -import json import logging -from redash.utils import JSONEncoder +from redash.utils import json_loads, json_dumps from redash.query_runner import * logger = logging.getLogger(__name__) @@ -83,7 +82,7 @@ def _get_tables(self, schema): if error is not None: raise Exception("Failed getting schema.") - results = json.loads(results) + results = json_loads(results) for row in results['rows']: table_name = '{}.{}'.format(row['table_schema'], row['table_name']) @@ -128,7 +127,7 @@ def run_query(self, query, user): 'type': types_map.get(col[1], None)} for col in columns_data] data = {'columns': columns, 'rows': rows} - json_data = json.dumps(data, cls=JSONEncoder) + json_data = json_dumps(data) error = None else: json_data = None diff --git a/redash/query_runner/yandex_metrica.py b/redash/query_runner/yandex_metrica.py index c64181bda6..814b1b8662 100644 --- a/redash/query_runner/yandex_metrica.py +++ b/redash/query_runner/yandex_metrica.py @@ -1,10 +1,12 @@ -import json -import yaml import logging -from redash.query_runner import * -from redash.utils import JSONEncoder -import requests +import yaml from urlparse import parse_qs, urlparse + +import requests + +from redash.query_runner import * +from redash.utils import json_dumps + logger = logging.getLogger(__name__) COLUMN_TYPES = { @@ -140,7 +142,7 @@ def run_query(self, query, user): return data, error try: - data = json.dumps(parse_ym_response(self._send_query(**params)), cls=JSONEncoder) + data = json_dumps(parse_ym_response(self._send_query(**params))) error = None except Exception as e: logging.exception(e) diff --git a/redash/serializers.py b/redash/serializers.py index c42316c89e..d809a1f73e 100644 --- a/redash/serializers.py +++ b/redash/serializers.py @@ -3,19 +3,20 @@ classes we have. This will ensure cleaner code and better separation of concerns. 
""" - -import json from funcy import project + from flask_login import current_user + from redash import models from redash.permissions import has_access, view_only +from redash.utils import json_loads def public_widget(widget): res = { 'id': widget.id, 'width': widget.width, - 'options': json.loads(widget.options), + 'options': json_loads(widget.options), 'text': widget.text, 'updated_at': widget.updated_at, 'created_at': widget.created_at @@ -27,7 +28,7 @@ def public_widget(widget): 'type': widget.visualization.type, 'name': widget.visualization.name, 'description': widget.visualization.description, - 'options': json.loads(widget.visualization.options), + 'options': json_loads(widget.visualization.options), 'updated_at': widget.visualization.updated_at, 'created_at': widget.visualization.created_at, 'query': { @@ -65,7 +66,7 @@ class QuerySerializer(Serializer): def __init__(self, object_or_list, **kwargs): self.object_or_list = object_or_list self.options = kwargs - + def serialize(self): if isinstance(self.object_or_list, models.Query): result = serialize_query(self.object_or_list, **self.options) @@ -77,7 +78,7 @@ def serialize(self): favorite_ids = models.Favorite.are_favorites(current_user.id, self.object_or_list) for query in result: query['is_favorite'] = query['id'] in favorite_ids - + return result @@ -132,7 +133,7 @@ def serialize_visualization(object, with_query=True): 'type': object.type, 'name': object.name, 'description': object.description, - 'options': json.loads(object.options), + 'options': json_loads(object.options), 'updated_at': object.updated_at, 'created_at': object.created_at } @@ -147,7 +148,7 @@ def serialize_widget(object): d = { 'id': object.id, 'width': object.width, - 'options': json.loads(object.options), + 'options': json_loads(object.options), 'dashboard_id': object.dashboard_id, 'text': object.text, 'updated_at': object.updated_at, @@ -181,8 +182,9 @@ def serialize_alert(alert, full=True): return d + def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True): - layout = json.loads(obj.layout) + layout = json_loads(obj.layout) widgets = [] diff --git a/redash/settings/helpers.py b/redash/settings/helpers.py index 408e95610a..98946d81e4 100644 --- a/redash/settings/helpers.py +++ b/redash/settings/helpers.py @@ -1,4 +1,3 @@ -import json import os @@ -19,8 +18,15 @@ def set_from_string(s): return set(array_from_string(s)) -def parse_boolean(str): - return json.loads(str.lower()) +def parse_boolean(s): + """Takes a string and returns the equivalent as a boolean value.""" + s = s.strip().lower() + if s in ('yes', 'true', 'on', '1'): + return True + elif s in ('no', 'false', 'off', '0', 'none'): + return False + else: + raise ValueError('Invalid boolean value %r' % s) def int_or_none(value): diff --git a/redash/tasks/queries.py b/redash/tasks/queries.py index ec00063d32..6ef20726b2 100644 --- a/redash/tasks/queries.py +++ b/redash/tasks/queries.py @@ -1,20 +1,19 @@ -import json import logging import signal import time import pystache import redis - from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded from celery.result import AsyncResult from celery.utils.log import get_task_logger from six import text_type -from redash import models, redis_connection, settings, statsd_client, utils + +from redash import models, redis_connection, settings, statsd_client from redash.query_runner import InterruptException -from redash.utils import gen_query_hash -from redash.worker import celery from redash.tasks.alerts import 
diff --git a/redash/tasks/queries.py b/redash/tasks/queries.py
index ec00063d32..6ef20726b2 100644
--- a/redash/tasks/queries.py
+++ b/redash/tasks/queries.py
@@ -1,20 +1,19 @@
-import json
 import logging
 import signal
 import time
 
 import pystache
 import redis
-
 from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
 from celery.result import AsyncResult
 from celery.utils.log import get_task_logger
 from six import text_type
-from redash import models, redis_connection, settings, statsd_client, utils
+
+from redash import models, redis_connection, settings, statsd_client
 from redash.query_runner import InterruptException
-from redash.utils import gen_query_hash
-from redash.worker import celery
 from redash.tasks.alerts import check_alerts_for_query
+from redash.utils import gen_query_hash, json_dumps, json_loads, utcnow
+from redash.worker import celery
 
 logger = get_task_logger(__name__)
@@ -60,7 +59,7 @@ def save(self, connection=None):
         self.data['updated_at'] = time.time()
         key_name = self._key_name(self.data['task_id'])
 
-        connection.set(key_name, utils.json_dumps(self.data))
+        connection.set(key_name, json_dumps(self.data))
         connection.zadd(self._get_list(), time.time(), key_name)
 
         for l in self.ALL_LISTS:
@@ -97,7 +96,7 @@ def get_by_task_id(cls, task_id, connection=None):
     @classmethod
     def create_from_data(cls, data):
         if data:
-            data = json.loads(data)
+            data = json_loads(data)
             return cls(data)
 
         return None
@@ -307,7 +306,7 @@ def refresh_queries():
     redis_connection.hmset('redash:status', {
         'outdated_queries_count': outdated_queries_count,
         'last_refresh_at': now,
-        'query_ids': json.dumps(query_ids)
+        'query_ids': json_dumps(query_ids)
     })
 
     statsd_client.gauge('manager.seconds_since_refresh', now - float(status.get('last_refresh_at', now)))
@@ -483,7 +482,7 @@ def run(self):
             query_result, updated_query_ids = models.QueryResult.store_result(
                 self.data_source.org_id, self.data_source,
                 self.query_hash, self.query, data,
-                run_time, utils.utcnow())
+                run_time, utcnow())
             models.db.session.commit()  # make sure that alert sees the latest query result
             self._log_progress('checking_alerts')
             for query_id in updated_query_ids:
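Since QueryTaskTracker data now goes through the wrappers in both directions (save writes, create_from_data reads), the round trip is easy to picture. A minimal sketch with invented values, not part of the patch:

    from redash.utils import json_dumps, json_loads

    data = {'task_id': 'abc123', 'state': 'created', 'updated_at': 1514764800.0}
    serialized = json_dumps(data)           # what save() stores via connection.set
    assert json_loads(serialized) == data   # what create_from_data() reconstructs
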
diff --git a/redash/utils/__init__.py b/redash/utils/__init__.py
index 46ad637f1c..607c1c9161 100644
--- a/redash/utils/__init__.py
+++ b/redash/utils/__init__.py
@@ -1,17 +1,17 @@
 import cStringIO
 import csv
 import codecs
-import decimal
 import datetime
-import json
+import decimal
+import hashlib
+import os
 import random
 import re
-import hashlib
-import pytz
+import uuid
+
 import pystache
-import os
+import pytz
 import simplejson
-
 from funcy import distinct, select_values
 from six import string_types
 from sqlalchemy.orm.query import Query
@@ -68,47 +68,34 @@ def generate_token(length):
     return ''.join(rand.choice(chars) for x in range(length))
 
 
-class JSONEncoderMixin:
-    """Custom JSON encoding class, to handle Decimal and datetime.date instances."""
-
-    def process_default(self, o):
-        # Some SQLAlchemy collections are lazy.
-        if isinstance(o, Query):
-            return True, list(o)
-        if isinstance(o, decimal.Decimal):
-            return True, float(o)
-
-        if isinstance(o, (datetime.date, datetime.time)):
-            return True, o.isoformat()
-
-        if isinstance(o, datetime.timedelta):
-            return True, str(o)
-
-        return False, None  # default processing
-
-
-class JSONEncoder(JSONEncoderMixin, json.JSONEncoder):
-    """Adapter for `json.dumps`."""
-
-    def default(self, o):
-        processed, result = self.process_default(o)
-        if not processed:
-            result = super(JSONEncoder, self).default(o)
-        return result
-
-
-class SimpleJSONEncoder(JSONEncoderMixin, simplejson.JSONEncoder):
+class JSONEncoder(simplejson.JSONEncoder):
     """Adapter for `simplejson.dumps`."""
 
     def default(self, o):
-        processed, result = self.process_default(o)
-        if not processed:
-            result = super(SimpleJSONEncoder, self).default(o)
-        return result
-
-
-def json_dumps(data):
-    return json.dumps(data, cls=JSONEncoder)
+        # Some SQLAlchemy collections are lazy.
+        if isinstance(o, Query):
+            return list(o)
+        elif isinstance(o, decimal.Decimal):
+            return float(o)
+        elif isinstance(o, (datetime.timedelta, uuid.UUID)):
+            return str(o)
+        elif isinstance(o, (datetime.date, datetime.time)):
+            return o.isoformat()
+        else:
+            return super(JSONEncoder, self).default(o)
+
+
+def json_loads(data, *args, **kwargs):
+    """A custom JSON loading function which passes all parameters to the
+    simplejson.loads function."""
+    return simplejson.loads(data, *args, **kwargs)
+
+
+def json_dumps(data, *args, **kwargs):
+    """A custom JSON dumping function which passes all parameters to the
+    simplejson.dumps function."""
+    kwargs.setdefault('cls', JSONEncoder)
+    return simplejson.dumps(data, *args, **kwargs)
 
 
 def build_url(request, host, path):
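This hunk is the heart of the patch: the mixin and the two encoder classes collapse into a single simplejson-based JSONEncoder (gaining uuid.UUID support along the way), and json_dumps/json_loads become the only entry points, forwarding any extra arguments to simplejson. A sketch of what the consolidated helpers handle (values invented for illustration):

    import datetime
    import decimal
    import uuid

    from redash.utils import json_dumps, json_loads

    payload = {
        'amount': decimal.Decimal('3.14'),          # serialized as 3.14
        'day': datetime.date(2018, 1, 1),           # serialized as "2018-01-01"
        'elapsed': datetime.timedelta(seconds=90),  # serialized as "0:01:30"
        'id': uuid.UUID(int=0),                     # serialized as the UUID string
    }

    s = json_dumps(payload, sort_keys=True)  # extra kwargs reach simplejson.dumps
    assert json_loads(s)['amount'] == 3.14
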
diff --git a/redash/utils/configuration.py b/redash/utils/configuration.py
index 615cc162f8..f19c281053 100644
--- a/redash/utils/configuration.py
+++ b/redash/utils/configuration.py
@@ -1,9 +1,9 @@
-import json
 import jsonschema
 from jsonschema import ValidationError
-
 from sqlalchemy.ext.mutable import Mutable
 
+from redash.utils import json_dumps, json_loads
+
 SECRET_PLACEHOLDER = '--------'
 
 
@@ -45,7 +45,7 @@ def validate(self):
         jsonschema.validate(self._config, self._schema)
 
     def to_json(self):
-        return json.dumps(self._config, sort_keys=True)
+        return json_dumps(self._config, sort_keys=True)
 
     def iteritems(self):
         return self._config.iteritems()
@@ -92,4 +92,4 @@ def __contains__(self, item):
 
     @classmethod
     def from_json(cls, config_in_json):
-        return cls(json.loads(config_in_json))
+        return cls(json_loads(config_in_json))
diff --git a/redash/utils/human_time.py b/redash/utils/human_time.py
index 3868623c35..0fa1dfe81f 100644
--- a/redash/utils/human_time.py
+++ b/redash/utils/human_time.py
@@ -8,5 +8,3 @@ def parse_human_time(s):
     time_struct, _ = cal.parse(s)
 
     return datetime.fromtimestamp(mktime(time_struct))
-
-
diff --git a/tests/__init__.py b/tests/__init__.py
index 04ed5ad5bf..ca89d0645f 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,6 +1,5 @@
 import os
 import datetime
-import json
 import logging
 from unittest import TestCase
 from contextlib import contextmanager
@@ -17,7 +16,7 @@
 from redash import create_app
 from redash import redis_connection
 from redash.models import db
-from redash.utils import json_dumps
+from redash.utils import json_dumps, json_loads
 from tests.factories import Factory, user_factory
 
 
@@ -94,7 +93,7 @@ def make_request(self, method, path, org=None, user=None, data=None,
         )
 
         if response.data and is_json:
-            response.json = json.loads(response.data)
+            response.json = json_loads(response.data)
 
         return response
 
@@ -112,7 +111,8 @@ def post_request(self, path, data=None, org=None, headers=None):
 
     def assertResponseEqual(self, expected, actual):
         for k, v in expected.iteritems():
-            if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
+            if isinstance(v, datetime.datetime) or isinstance(actual[k],
+                                                              datetime.datetime):
                 continue
 
             if isinstance(v, list):
diff --git a/tests/handlers/test_dashboards.py b/tests/handlers/test_dashboards.py
index 0cd38a5fea..08e0c5cce2 100644
--- a/tests/handlers/test_dashboards.py
+++ b/tests/handlers/test_dashboards.py
@@ -1,8 +1,9 @@
-import json
 from tests import BaseTestCase
+
 from redash.models import ApiKey, Dashboard, AccessPermission, db
 from redash.permissions import ACCESS_TYPE_MODIFY
 from redash.serializers import serialize_dashboard
+from redash.utils import json_loads
 
 
 class TestDashboardListResource(BaseTestCase):
@@ -25,7 +26,7 @@ def test_returns_dashboards(self):
         assert len(rv.json['results']) == 3
         assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id, d2.id, d3.id])
-
+
     def test_filters_with_tags(self):
         d1 = self.factory.create_dashboard(tags=[u'test'])
         d2 = self.factory.create_dashboard()
@@ -34,7 +35,7 @@ def test_filters_with_tags(self):
         rv = self.make_request('get', '/api/dashboards?tags=test')
         assert len(rv.json['results']) == 1
         assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id])
-
+
     def test_search_term(self):
         d1 = self.factory.create_dashboard(name="Sales")
         d2 = self.factory.create_dashboard(name="Q1 sales")
@@ -52,7 +53,7 @@ def test_get_dashboard(self):
         self.assertEquals(rv.status_code, 200)
 
         expected = serialize_dashboard(d1, with_widgets=True, with_favorite_state=False)
-        actual = json.loads(rv.data)
+        actual = json_loads(rv.data)
 
         self.assertResponseEqual(expected, actual)
diff --git a/tests/handlers/test_query_results.py b/tests/handlers/test_query_results.py
index 2f596213eb..1af3c1a3a5 100644
--- a/tests/handlers/test_query_results.py
+++ b/tests/handlers/test_query_results.py
@@ -1,6 +1,7 @@
-import json
 from tests import BaseTestCase
+
 from redash.models import db
+from redash.utils import json_dumps
 
 
 class TestQueryResultsCacheHeaders(BaseTestCase):
@@ -162,7 +163,17 @@ def test_renders_excel_file(self):
 
     def test_renders_excel_file_when_rows_have_missing_columns(self):
         query = self.factory.create_query()
-        query_result = self.factory.create_query_result(data=json.dumps({'rows': [{'test': 1}, {'test': 2, 'test2': 3}], 'columns': [{'name': 'test'}, {'name': 'test2'}]}))
+        data = {
+            'rows': [
+                {'test': 1},
+                {'test': 2, 'test2': 3},
+            ],
+            'columns': [
+                {'name': 'test'},
+                {'name': 'test2'},
+            ],
+        }
+        query_result = self.factory.create_query_result(data=json_dumps(data))
 
         rv = self.make_request('get', '/api/queries/{}/results/{}.xlsx'.format(query.id, query_result.id), is_json=False)
         self.assertEquals(rv.status_code, 200)
diff --git a/tests/query_runner/test_mongodb.py b/tests/query_runner/test_mongodb.py
index e39d785cf1..244ff29e94 100644
--- a/tests/query_runner/test_mongodb.py
+++ b/tests/query_runner/test_mongodb.py
@@ -1,10 +1,10 @@
 import datetime
-import json
 from unittest import TestCase
+
 from pytz import utc
 
-from redash.query_runner.mongodb import parse_query_json, parse_results, _get_column_by_name
-from redash.utils import parse_human_time
+from redash.query_runner.mongodb import parse_query_json, parse_results, _get_column_by_name
+from redash.utils import json_dumps, parse_human_time
 
 
 class TestParseQueryJson(TestCase):
@@ -18,7 +18,7 @@ def test_ignores_non_isodate_fields(self):
             }
         }
 
-        query_data = parse_query_json(json.dumps(query))
+        query_data = parse_query_json(json_dumps(query))
 
         self.assertDictEqual(query_data, query)
 
@@ -32,7 +32,7 @@ def test_parses_isodate_fields(self):
             'testIsoDate': "ISODate(\"2014-10-03T00:00\")"
         }
 
-        query_data = parse_query_json(json.dumps(query))
+        query_data = parse_query_json(json_dumps(query))
 
         self.assertEqual(query_data['testIsoDate'], datetime.datetime(2014, 10, 3, 0, 0))
 
@@ -49,7 +49,7 @@ def test_parses_isodate_in_nested_fields(self):
             'testIsoDate': "ISODate(\"2014-10-03T00:00\")"
         }
 
-        query_data = parse_query_json(json.dumps(query))
+        query_data = parse_query_json(json_dumps(query))
 
         self.assertEqual(query_data['testIsoDate'], datetime.datetime(2014, 10, 3, 0, 0))
         self.assertEqual(query_data['test_dict']['b']['date'], datetime.datetime(2014, 10, 4, 0, 0))
@@ -71,7 +71,7 @@ def test_handles_nested_fields(self):
             ]
         }
 
-        query_data = parse_query_json(json.dumps(query))
+        query_data = parse_query_json(json_dumps(query))
 
         self.assertDictEqual(query, query_data)
 
@@ -91,7 +91,7 @@ def test_supports_extended_json_types(self):
                 '$undefined': None
             }
         }
 
-        query_data = parse_query_json(json.dumps(query))
+        query_data = parse_query_json(json_dumps(query))
 
         self.assertEqual(query_data['test$undefined'], None)
         self.assertEqual(query_data['test$date'], datetime.datetime(2014, 10, 3, 0, 0).replace(tzinfo=utc))
@@ -101,7 +101,7 @@ def test_supports_relative_timestamps(self):
         }
 
         one_hour_ago = parse_human_time("1 hour ago")
-        query_data = parse_query_json(json.dumps(query))
+        query_data = parse_query_json(json_dumps(query))
 
         self.assertEqual(query_data['ts'], one_hour_ago)
 
@@ -119,7 +119,7 @@ def test_parses_regular_results(self):
         self.assertIsNotNone(_get_column_by_name(columns, 'column'))
         self.assertIsNotNone(_get_column_by_name(columns, 'column2'))
         self.assertIsNotNone(_get_column_by_name(columns, 'column3'))
-
+
     def test_parses_nested_results(self):
         raw_results = [
             {'column': 1, 'column2': 'test', 'nested': {
@@ -143,4 +143,4 @@ def test_parses_nested_results(self):
         self.assertIsNotNone(_get_column_by_name(columns, 'column3'))
         self.assertIsNotNone(_get_column_by_name(columns, 'nested.a'))
         self.assertIsNotNone(_get_column_by_name(columns, 'nested.b'))
-        self.assertIsNotNone(_get_column_by_name(columns, 'nested.c'))
\ No newline at end of file
+        self.assertIsNotNone(_get_column_by_name(columns, 'nested.c'))
diff --git a/tests/query_runner/test_prometheus.py b/tests/query_runner/test_prometheus.py
index 32cec3fe9a..21c8dcdb92 100644
--- a/tests/query_runner/test_prometheus.py
+++ b/tests/query_runner/test_prometheus.py
@@ -1,6 +1,6 @@
 import datetime
-import json
 from unittest import TestCase
+
 from redash.query_runner.prometheus import get_instant_rows, get_range_rows