Mirror of https://github.com/valitydev/redash.git (synced 2024-11-07 01:25:16 +00:00)
Commit 5a5fdecdde
* add rq and an rq_worker service
* add rq_scheduler and an rq_scheduler service
* move beat schedule to periodic_jobs queue
* move version checks to RQ
* move query result cleanup to RQ
* use timedelta and DRY up a bit
* move custom tasks to RQ
* do actual schema refreshes in rq
* rename 'period_jobs' to 'periodic', as it obviously holds jobs
* move send_email to rq
* DRY up enqueues
* ditch and use a partially applied decorator (see the decorator sketch after this list)
* move subscribe to rq
* move check_alerts_for_query to rq
* move record_event to rq
* make tests play nicely with rq
* 👋 beat
* rename rq_scheduler to plain scheduler, now that there's no Celery scheduler entrypoint
* add some color to rq-worker's output
* add logging context to rq jobs (while keeping execute_query context via get_task_logger for now)
* move schedule to its own module
* cancel previously scheduled periodic jobs. not sure this is a good idea.
* rename redash.scheduler to redash.schedule
* allow custom dynamic jobs to be added declaratively
* add basic monitoring to rq queues
* add worker monitoring
* pleasing the CodeClimate overlords
* adjust cypress docker-compose.yml to include rq changes
* DRY up Cypress docker-compose
* add rq dependencies to cypress docker-compose service
* an odd attempt at watching docker-compose logs when running with Cypress
* Revert "an odd attempt at watching docker-compose logs when running with Cypress"
This reverts commit 016bd1a93e3efa84a9f27d0f2acb972ce1957bcd.
* show docker-compose logs at Cypress shutdown
* Revert "DRY up Cypress docker-compose"
This reverts commit 43abac7084c207ab9e39192ac79d520448c2c527.
* minimum version for binding is 3.2
* remove unnecessary code reloads in Cypress
* add a command which errors if any of the workers running on the current machine haven't been active in the last minute (see the healthcheck sketch after this list)
* SCHEMAS_REFRESH_QUEUE is no longer a required setting
* split tasks/queries.py to execution.py and maintenance.py
* fix tests after query execution split
* pleasing the CodeClimate overlords
* rename worker to celery_worker and rq_worker to worker
* use /rq_status instead of /jobs
* show started jobs' elapsed time relative to UTC
* replace all spaces in column names
* fix query tests after execution split
* exit with an int
* general lint
* add an entrypoint for rq_healthcheck
* fix indentation
* delete all existing periodic jobs before scheduling them
* remove some unrequired requires
* move schedule example to redash.schedule
* add RQ integration to Sentry's setup (see the Sentry sketch after this list)
* pleasing the CodeClimate overlords
* remove replication settings from docker-compose - the proper way to scale with docker-compose is the --scale CLI option, which will be described in the knowledge base
* revert to calling a function in dynamic settings to allow periodic jobs to be scheduled after the app has been loaded
* don't need to depend on context when templating failure reports
* set the timeout_ttl to double the interval so job results don't expire, which would stop periodic jobs from rescheduling (see the scheduling sketch after this list)
* whoops, bad merge
* describe custom jobs and don't actually schedule them
* fix merge
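
The "DRY up enqueues" / partially applied decorator pair above might look like this minimal sketch. The queue name, connection handling, and task body are illustrative assumptions, not the actual Redash code.

from functools import partial

from redis import Redis
from rq.decorators import job as rq_job

# Partially apply rq's @job decorator so every task shares one connection
# instead of repeating connection= at each definition site.
job = partial(rq_job, connection=Redis())


@job("default")  # hypothetical queue name
def record_event(event):
    print(event)

# record_event.delay({"action": "view"}) now enqueues on the shared connection.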
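A worker healthcheck in the spirit of the command above could be sketched as follows, using rq's Worker.all() and last_heartbeat. The one-minute threshold and same-machine filter follow the commit's description; the hostname comparison and entrypoint shape are assumptions.

import socket
import sys
from datetime import datetime, timedelta

from redis import Redis
from rq import Worker


def check_workers():
    # Consider a worker unhealthy if its last heartbeat is older than a minute.
    threshold = datetime.utcnow() - timedelta(minutes=1)
    hostname = socket.gethostname()

    # Worker.all() lists every registered worker; keep only this machine's.
    local_workers = [w for w in Worker.all(connection=Redis())
                     if w.hostname == hostname]

    stale = [w for w in local_workers
             if w.last_heartbeat and w.last_heartbeat < threshold]

    # Exit with an int so this works as a container healthcheck.
    sys.exit(1 if stale else 0)


if __name__ == "__main__":
    check_workers()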
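Sentry ships an RQ integration in sentry-sdk, so the Sentry wiring is roughly:

import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration

# Report exceptions raised inside RQ jobs to Sentry as well.
sentry_sdk.init(
    dsn="https://example@sentry.invalid/1",  # placeholder DSN
    integrations=[RqIntegration()],
)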
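The periodic scheduling described above (delete existing jobs, re-add them declaratively, keep results for twice the interval) might be sketched like this with rq-scheduler. The job list, queue name, and connection are illustrative assumptions, not the actual redash.schedule module.

from datetime import datetime, timedelta

from redis import Redis
from rq_scheduler import Scheduler

scheduler = Scheduler(queue_name="periodic", connection=Redis())

# Illustrative job list; the real definitions live in redash.schedule.
jobs = [
    {"func": "redash.tasks.refresh_queries", "interval": timedelta(seconds=30)},
    {"func": "redash.tasks.cleanup_query_results", "interval": timedelta(minutes=5)},
]


def schedule_periodic_jobs():
    # Delete all previously scheduled periodic jobs before re-adding them,
    # so definitions removed from the list don't keep running.
    for job in scheduler.get_jobs():
        job.delete()

    for spec in jobs:
        interval = int(spec["interval"].total_seconds())
        scheduler.schedule(
            scheduled_time=datetime.utcnow(),
            func=spec["func"],
            interval=interval,
            # Keep each result for twice the interval so it can't expire
            # before the next run, which would stop the job rescheduling.
            result_ttl=interval * 2,
            repeat=None,  # repeat indefinitely
        )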
129 lines · 5.8 KiB · Python
from unittest import TestCase
from collections import namedtuple
import uuid

import mock

from tests import BaseTestCase
from redash import redis_connection, models
from redash.utils import json_dumps
from redash.query_runner.pg import PostgreSQL
from redash.tasks.queries.execution import QueryExecutionError, enqueue_query, execute_query


FakeResult = namedtuple('FakeResult', 'id')


def gen_hash(*args, **kwargs):
    # Stand-in for apply_async: return a fake task result with a unique id.
    return FakeResult(uuid.uuid4().hex)


class TestEnqueueTask(BaseTestCase):
    def test_multiple_enqueue_of_same_query(self):
        # Enqueueing the same query three times should schedule only one execution.
        query = self.factory.create_query()
        execute_query.apply_async = mock.MagicMock(side_effect=gen_hash)

        enqueue_query(query.query_text, query.data_source, query.user_id, False, query, {'Username': 'Arik', 'Query ID': query.id})
        enqueue_query(query.query_text, query.data_source, query.user_id, False, query, {'Username': 'Arik', 'Query ID': query.id})
        enqueue_query(query.query_text, query.data_source, query.user_id, False, query, {'Username': 'Arik', 'Query ID': query.id})

        self.assertEqual(1, execute_query.apply_async.call_count)

    @mock.patch('redash.settings.dynamic_settings.query_time_limit', return_value=60)
    def test_limits_query_time(self, _):
        # The dynamic query_time_limit setting should be passed through
        # as the task's soft time limit.
        query = self.factory.create_query()
        execute_query.apply_async = mock.MagicMock(side_effect=gen_hash)

        enqueue_query(query.query_text, query.data_source, query.user_id, False, query, {'Username': 'Arik', 'Query ID': query.id})

        _, kwargs = execute_query.apply_async.call_args
        self.assertEqual(60, kwargs.get('soft_time_limit'))

    def test_multiple_enqueue_of_different_query(self):
        # Distinct query texts are not deduplicated; each gets its own execution.
        query = self.factory.create_query()
        execute_query.apply_async = mock.MagicMock(side_effect=gen_hash)

        enqueue_query(query.query_text, query.data_source, query.user_id, False, None, {'Username': 'Arik', 'Query ID': query.id})
        enqueue_query(query.query_text + '2', query.data_source, query.user_id, False, None, {'Username': 'Arik', 'Query ID': query.id})
        enqueue_query(query.query_text + '3', query.data_source, query.user_id, False, None, {'Username': 'Arik', 'Query ID': query.id})

        self.assertEqual(3, execute_query.apply_async.call_count)


class QueryExecutorTests(BaseTestCase):

    def test_success(self):
        """
        ``execute_query`` invokes the query runner and stores a query result.
        """
        cm = mock.patch("celery.app.task.Context.delivery_info", {'routing_key': 'test'})
        with cm, mock.patch.object(PostgreSQL, "run_query") as qr:
            query_result_data = {"columns": [], "rows": []}
            qr.return_value = (json_dumps(query_result_data), None)
            result_id = execute_query("SELECT 1, 2", self.factory.data_source.id, {})
            self.assertEqual(1, qr.call_count)
            result = models.QueryResult.query.get(result_id)
            self.assertEqual(result.data, query_result_data)

    def test_success_scheduled(self):
        """
        Scheduled queries remember their latest results.
        """
        cm = mock.patch("celery.app.task.Context.delivery_info",
                        {'routing_key': 'test'})
        q = self.factory.create_query(query_text="SELECT 1, 2", schedule={"interval": 300})
        with cm, mock.patch.object(PostgreSQL, "run_query") as qr:
            qr.return_value = ([1, 2], None)
            result_id = execute_query(
                "SELECT 1, 2",
                self.factory.data_source.id, {},
                scheduled_query_id=q.id)
            q = models.Query.get_by_id(q.id)
            self.assertEqual(q.schedule_failures, 0)
            result = models.QueryResult.query.get(result_id)
            self.assertEqual(q.latest_query_data, result)

    def test_failure_scheduled(self):
        """
        Scheduled queries that fail have their failure recorded.
        """
        cm = mock.patch("celery.app.task.Context.delivery_info",
                        {'routing_key': 'test'})
        q = self.factory.create_query(query_text="SELECT 1, 2", schedule={"interval": 300})
        with cm, mock.patch.object(PostgreSQL, "run_query") as qr:
            qr.side_effect = ValueError("broken")
            with self.assertRaises(QueryExecutionError):
                execute_query("SELECT 1, 2", self.factory.data_source.id, {},
                              scheduled_query_id=q.id)
            q = models.Query.get_by_id(q.id)
            self.assertEqual(q.schedule_failures, 1)
            with self.assertRaises(QueryExecutionError):
                execute_query("SELECT 1, 2", self.factory.data_source.id, {},
                              scheduled_query_id=q.id)
            q = models.Query.get_by_id(q.id)
            self.assertEqual(q.schedule_failures, 2)

    def test_success_after_failure(self):
        """
        Query execution success resets the failure counter.
        """
        cm = mock.patch("celery.app.task.Context.delivery_info",
                        {'routing_key': 'test'})
        q = self.factory.create_query(query_text="SELECT 1, 2", schedule={"interval": 300})
        with cm, mock.patch.object(PostgreSQL, "run_query") as qr:
            qr.side_effect = ValueError("broken")
            with self.assertRaises(QueryExecutionError):
                execute_query("SELECT 1, 2",
                              self.factory.data_source.id, {},
                              scheduled_query_id=q.id)
            q = models.Query.get_by_id(q.id)
            self.assertEqual(q.schedule_failures, 1)

        with cm, mock.patch.object(PostgreSQL, "run_query") as qr:
            qr.return_value = ([1, 2], None)
            execute_query("SELECT 1, 2",
                          self.factory.data_source.id, {},
                          scheduled_query_id=q.id)
            q = models.Query.get_by_id(q.id)
            self.assertEqual(q.schedule_failures, 0)