mirror of
https://github.com/valitydev/redash.git
synced 2024-11-07 01:25:16 +00:00
Move schema fetching to DataSource + tests
This commit is contained in:
parent
1fe4f291f2
commit
e3cc3ef9a4
@ -1,6 +1,3 @@
|
||||
from flask import make_response
|
||||
from functools import update_wrapper
|
||||
|
||||
ONE_YEAR = 60 * 60 * 24 * 365.25
|
||||
|
||||
headers = {
|
||||
|
@ -22,7 +22,7 @@ from redash.wsgi import app, auth, api
|
||||
from redash.tasks import QueryTask, record_event
|
||||
from redash.cache import headers as cache_headers
|
||||
from redash.permissions import require_permission
|
||||
from redash.query_runner import query_runners, validate_configuration, get_query_runner
|
||||
from redash.query_runner import query_runners, validate_configuration
|
||||
|
||||
|
||||
@app.route('/ping', methods=['GET'])
|
||||
@ -225,8 +225,7 @@ api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources'
|
||||
class DataSourceSchemaAPI(BaseResource):
|
||||
def get(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
query_runner = get_query_runner(data_source.type, data_source.options)
|
||||
schema = query_runner.get_schema()
|
||||
schema = data_source.get_schema()
|
||||
|
||||
return schema
|
||||
|
||||
|
@ -13,7 +13,8 @@ from playhouse.postgres_ext import ArrayField, DateTimeTZField, PostgresqlExtDat
|
||||
from flask.ext.login import UserMixin, AnonymousUserMixin
|
||||
import psycopg2
|
||||
|
||||
from redash import utils, settings
|
||||
from redash import utils, settings, redis_connection
|
||||
from redash.query_runner import get_query_runner
|
||||
|
||||
|
||||
class Database(object):
|
||||
@ -241,6 +242,23 @@ class DataSource(BaseModel):
|
||||
'type': self.type
|
||||
}
|
||||
|
||||
def get_schema(self, refresh=False):
|
||||
key = "data_source:schema:{}".format(self.id)
|
||||
|
||||
cache = None
|
||||
if not refresh:
|
||||
cache = redis_connection.get(key)
|
||||
|
||||
if cache is None:
|
||||
query_runner = get_query_runner(self.type, self.options)
|
||||
schema = query_runner.get_schema()
|
||||
|
||||
redis_connection.set(key, json.dumps(schema))
|
||||
else:
|
||||
schema = json.loads(cache)
|
||||
|
||||
return schema
|
||||
|
||||
@classmethod
|
||||
def all(cls):
|
||||
return cls.select().order_by(cls.id.asc())
|
||||
|
@ -1,4 +1,3 @@
|
||||
from collections import defaultdict
|
||||
import json
|
||||
import logging
|
||||
import psycopg2
|
||||
|
@ -218,6 +218,17 @@ def cleanup_query_results():
|
||||
logger.info("Deleted %d unused query results out of total of %d." % (deleted_count, total_unused_query_results))
|
||||
|
||||
|
||||
@celery.task(base=BaseTask)
|
||||
def refresh_schemas():
|
||||
"""
|
||||
Refreshes the schema of each data source.
|
||||
"""
|
||||
|
||||
for ds in models.DataSource.all():
|
||||
logger.info("Refreshing schema for: {}".format(ds.name))
|
||||
ds.get_schema(refresh=True)
|
||||
|
||||
|
||||
@celery.task(bind=True, base=BaseTask, track_started=True)
|
||||
def execute_query(self, query, data_source_id):
|
||||
# TODO: maybe this should be a class?
|
||||
|
@ -15,6 +15,10 @@ celery_schedule = {
|
||||
'cleanup_tasks': {
|
||||
'task': 'redash.tasks.cleanup_tasks',
|
||||
'schedule': timedelta(minutes=5)
|
||||
},
|
||||
'refresh_schemas': {
|
||||
'task': 'redash.tasks.refresh_schemas',
|
||||
'schedule': timedelta(minutes=30)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -47,7 +47,7 @@ user_factory = ModelFactory(redash.models.User,
|
||||
data_source_factory = ModelFactory(redash.models.DataSource,
|
||||
name='Test',
|
||||
type='pg',
|
||||
options='')
|
||||
options='{"dbname": "test"}')
|
||||
|
||||
|
||||
dashboard_factory = ModelFactory(redash.models.Dashboard,
|
||||
|
@ -2,10 +2,12 @@
|
||||
import datetime
|
||||
import json
|
||||
from unittest import TestCase
|
||||
import mock
|
||||
from tests import BaseTestCase
|
||||
from redash import models
|
||||
from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
|
||||
from redash.utils import gen_query_hash
|
||||
from redash import query_runner
|
||||
|
||||
|
||||
class DashboardTest(BaseTestCase):
|
||||
@ -195,6 +197,43 @@ class QueryArchiveTest(BaseTestCase):
|
||||
self.assertEqual(None, query.schedule)
|
||||
|
||||
|
||||
class DataSourceTest(BaseTestCase):
|
||||
def test_get_schema(self):
|
||||
return_value = "{}"
|
||||
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
|
||||
patched_get_schema.return_value = return_value
|
||||
|
||||
ds = data_source_factory.create()
|
||||
schema = ds.get_schema()
|
||||
|
||||
self.assertEqual(return_value, schema)
|
||||
|
||||
def test_get_schema_uses_cache(self):
|
||||
return_value = "{}"
|
||||
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
|
||||
patched_get_schema.return_value = return_value
|
||||
|
||||
ds = data_source_factory.create()
|
||||
ds.get_schema()
|
||||
schema = ds.get_schema()
|
||||
|
||||
self.assertEqual(return_value, schema)
|
||||
self.assertEqual(patched_get_schema.call_count, 1)
|
||||
|
||||
def test_get_schema_skips_cache_with_refresh_true(self):
|
||||
return_value = "{}"
|
||||
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
|
||||
patched_get_schema.return_value = return_value
|
||||
|
||||
ds = data_source_factory.create()
|
||||
ds.get_schema()
|
||||
new_return_value = '{"new":true}'
|
||||
patched_get_schema.return_value = new_return_value
|
||||
schema = ds.get_schema(refresh=True)
|
||||
|
||||
self.assertEqual(new_return_value, schema)
|
||||
self.assertEqual(patched_get_schema.call_count, 2)
|
||||
|
||||
|
||||
class QueryResultTest(BaseTestCase):
|
||||
def setUp(self):
|
||||
|
Loading…
Reference in New Issue
Block a user