Fix a few more inconsistencies when loading and dumping JSON. (#3626)

* Fix a few more inconsistencies when loading and dumping JSON.

Refs #2807. Original work in: #2817

These changes have been added since c2429e92d2.

* Review fixes.
Jannis Leidel 2019-03-27 16:14:32 +01:00 committed by Arik Fraimovich
parent 73c8e3096d
commit 77c53130a4
6 changed files with 16 additions and 24 deletions
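
For context, the pattern this commit converges on is the pair of shared helpers in redash.utils. The sketch below is a hedged approximation of what such helpers typically look like, not the actual redash implementation; the encoder's type handling (dates, decimals, UUIDs) is an assumption added for illustration.

import datetime
import decimal
import uuid

import simplejson


class ResultEncoder(simplejson.JSONEncoder):
    # Serialize types that commonly show up in query results (assumed set).
    def default(self, o):
        if isinstance(o, (datetime.date, datetime.datetime)):
            return o.isoformat()
        if isinstance(o, decimal.Decimal):
            return float(o)
        if isinstance(o, uuid.UUID):
            return str(o)
        return super(ResultEncoder, self).default(o)


def json_dumps(data, *args, **kwargs):
    # One entry point for dumping, so every caller gets the same encoder.
    kwargs.setdefault('cls', ResultEncoder)
    return simplejson.dumps(data, *args, **kwargs)


def json_loads(data, *args, **kwargs):
    # Thin wrapper so loading goes through simplejson everywhere as well.
    return simplejson.loads(data, *args, **kwargs)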

View File

@@ -1,7 +1,7 @@
import logging
import json
import jwt
import requests
import simplejson
logger = logging.getLogger('jwt_auth')
@@ -21,7 +21,7 @@ def get_public_keys(url):
if 'keys' in data:
public_keys = []
for key_dict in data['keys']:
public_key = jwt.algorithms.RSAAlgorithm.from_jwk(json.dumps(key_dict))
public_key = jwt.algorithms.RSAAlgorithm.from_jwk(simplejson.dumps(key_dict))
public_keys.append(public_key)
get_public_keys.key_cache[url] = public_keys
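
RSAAlgorithm.from_jwk takes a JSON string, which is why the key dict is dumped before being passed in; plain simplejson.dumps suffices because a JWK is already plain JSON-serializable data. A hedged sketch of how the cached keys are typically consumed (the decode call, audience and algorithm below are illustrative, not part of this diff):

import jwt
import simplejson


def decode_with_jwks(token, key_dicts, audience):
    # Try each public key from the JWKS document until one verifies the token.
    for key_dict in key_dicts:
        public_key = jwt.algorithms.RSAAlgorithm.from_jwk(simplejson.dumps(key_dict))
        try:
            return jwt.decode(token, key=public_key, audience=audience, algorithms=['RS256'])
        except jwt.InvalidTokenError:
            continue
    return None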

View File

@@ -1,10 +1,8 @@
import ast
import itertools
import json
import base64
from sqlalchemy import union_all
from redash import redis_connection, __version__, settings
from redash.models import db, DataSource, Query, QueryResult, Dashboard, Widget
from redash.utils import json_loads
from redash.worker import celery
@@ -74,9 +72,9 @@ def get_status():
def get_waiting_in_queue(queue_name):
jobs = []
for raw in redis_connection.lrange(queue_name, 0, -1):
job = json.loads(raw)
job = json_loads(raw)
try:
args = json.loads(job['headers']['argsrepr'])
args = json_loads(job['headers']['argsrepr'])
if args.get('query_id') == 'adhoc':
args['query_id'] = None
except ValueError:
@@ -114,7 +112,7 @@ def parse_tasks(task_lists, state):
if task['name'] == 'redash.tasks.execute_query':
try:
args = json.loads(task['args'])
args = json_loads(task['args'])
except ValueError:
args = {}
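
The switch to json_loads keeps the existing fallback: the 'argsrepr' header is not guaranteed to parse as JSON, so the ValueError branch leaves the args empty. A minimal sketch of the resulting parse (the function name and return shape are illustrative):

from redash.utils import json_loads


def parse_queued_job(raw):
    # Queue entries are JSON envelopes; 'argsrepr' may not parse as JSON,
    # in which case we fall back to an empty args dict, as in the hunk above.
    job = json_loads(raw)
    try:
        args = json_loads(job['headers']['argsrepr'])
    except ValueError:
        args = {}
    return job, args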

View File

@@ -1,9 +1,7 @@
import os
import json
import logging
from redash.query_runner import *
from redash.utils import JSONEncoder
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -82,7 +80,7 @@ class DB2(BaseSQLQueryRunner):
if error is not None:
raise Exception("Failed getting schema.")
results = json.loads(results)
results = json_loads(results)
for row in results['rows']:
if row['TABLE_SCHEMA'] != u'public':
@@ -129,7 +127,7 @@ class DB2(BaseSQLQueryRunner):
data = {'columns': columns, 'rows': rows}
error = None
json_data = json.dumps(data, cls=JSONEncoder)
json_data = json_dumps(data)
else:
error = 'Query completed but it returned no data.'
json_data = None

View File

@@ -1,12 +1,11 @@
import os
import json
import logging
import requests
from requests.auth import HTTPBasicAuth
from redash import settings
from redash.query_runner import *
from redash.utils import JSONEncoder
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -102,7 +101,7 @@ class Kylin(BaseQueryRunner):
columns = self.get_columns(data['columnMetas'])
rows = self.get_rows(columns, data['results'])
return json.dumps({'columns': columns, 'rows': rows}), None
return json_dumps({'columns': columns, 'rows': rows}), None
def get_schema(self, get_stats=False):
url = self.configuration['url']

View File

@@ -1,8 +1,6 @@
import requests
import os
from redash.query_runner import *
from redash.utils import JSONEncoder
import json
from redash.utils import json_dumps
def _get_type(value):
@@ -96,7 +94,7 @@ class Rockset(BaseSQLQueryRunner):
columns = []
for k in rows[0]:
columns.append({'name': k, 'friendly_name': k, 'type': _get_type(rows[0][k])})
data = json.dumps({'columns': columns, 'rows': rows}, cls=JSONEncoder)
data = json_dumps({'columns': columns, 'rows': rows})
return data, None
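
The DB2, Kylin and Rockset hunks above all converge on the same shape: assemble a {'columns': ..., 'rows': ...} dict and serialize it with the shared helper instead of json.dumps(data, cls=JSONEncoder). A hedged sketch of that pattern (the column metadata values are illustrative; the real runners derive the type from the driver):

from redash.utils import json_dumps


def serialize_result(column_names, raw_rows):
    # Shape shared by the query runners touched in this commit.
    columns = [{'name': name, 'friendly_name': name, 'type': 'string'}
               for name in column_names]
    rows = [dict(zip(column_names, row)) for row in raw_rows]
    return json_dumps({'columns': columns, 'rows': rows})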

View File

@@ -1,7 +1,6 @@
from redash.query_runner import *
from redash.utils import json_dumps
from redash.utils import json_dumps, json_loads
import json
import jwt
import datetime
import requests
@@ -93,7 +92,7 @@ class Uptycs(BaseSQLQueryRunner):
True))
if response.status_code == 200:
response_output = json.loads(response.content)
response_output = json_loads(response.content)
else:
error = 'status_code ' + str(response.status_code) + '\n'
error = error + "failed to connect"
@@ -124,7 +123,7 @@ class Uptycs(BaseSQLQueryRunner):
verify=self.configuration.get('verify_ssl',
True))
redash_json = []
schema = json.loads(response.content)
schema = json_loads(response.content)
for each_def in schema['tables']:
table_name = each_def['name']
columns = []
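
The same substitution is applied to the Uptycs schema endpoint: the response body is parsed with json_loads and reshaped into the structure Redash expects. A hedged sketch of that reshaping (the 'tables' and 'name' keys come from the hunk above; the column field name inside each table entry is an assumption):

from redash.utils import json_loads


def parse_uptycs_schema(response_content):
    # Parse the raw API response and build one {'name': ..., 'columns': [...]}
    # entry per table, which is the shape the schema browser consumes.
    schema = json_loads(response_content)
    tables = []
    for each_def in schema['tables']:
        columns = [col['name'] for col in each_def.get('columns', [])]
        tables.append({'name': each_def['name'], 'columns': columns})
    return tables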