Merge pull request #88 from EverythingMe/feature_graphite_v2

Feature: graphite query runner
Arik Fraimovich 2014-02-11 11:47:11 +02:00
commit 640557df4f
6 changed files with 74 additions and 20 deletions

View File

@@ -79,24 +79,22 @@
         'type': Visualization.prototype.TYPES.CHART,
         'name': '',
         'description': q.description || '',
-        'options': newOptions()
+        'options': newOptions(Visualization.prototype.TYPES.CHART)
       };
     }
   }, true);
 }
 
 function newOptions(chartType) {
-  if (chartType === Visualization.prototype.TYPES.COHORT) {
-    // empty config at the moment
-    return {};
+  if (chartType === Visualization.prototype.TYPES.CHART) {
+    return {
+      'series': {
+        'type': 'column'
+      }
+    };
   }
 
-  // Chart
-  return {
-    'series': {
-      'type': scope.seriesTypes[0]
-    }
-  };
+  return {};
 }
 
 scope.$watch('vis.type', function(type) {

View File

@@ -95,12 +95,15 @@
     },
     line: {
       marker: {
-        radius: 3,
+        radius: 1
       },
-      lineWidth: 1,
+      lineWidth: 2,
       states: {
        hover: {
-          lineWidth: 2
+          lineWidth: 2,
+          marker: {
+            radius: 3
+          }
        }
      }
    },

View File

@@ -153,9 +153,12 @@ class Manager(object):
         if self.workers:
             return self.workers
 
-        if getattr(settings, 'CONNECTION_ADAPTER', None) == "mysql":
+        if connection_type == 'mysql':
             from redash.data import query_runner_mysql
             runner = query_runner_mysql.mysql(connection_string)
+        elif connection_type == 'graphite':
+            from redash.data import query_runner_graphite
+            runner = query_runner_graphite.graphite(connection_string)
         else:
             from redash.data import query_runner
             runner = query_runner.redshift(connection_string)
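
Note on the Manager change above: judging from the diff, connection_type now stands in for the old settings.CONNECTION_ADAPTER lookup, and connection_string is handed to the runner factory unchanged, so for Graphite it is the params dict rather than a DSN string. A minimal sketch of how a Graphite runner would presumably be constructed (the dict values are placeholders taken from the settings example further down):

    # Hypothetical wiring; names mirror the diff, values are illustrative.
    connection_string = {'url': 'https://graphite.yourcompany.com',
                         'auth': ('user', 'password'),
                         'verify': True}

    from redash.data import query_runner_graphite
    runner = query_runner_graphite.graphite(connection_string)

As with the existing runners, the returned object is a callable that takes the query text and returns a (data, error) tuple.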

View File

@@ -0,0 +1,46 @@
+"""
+QueryRunner for Graphite.
+"""
+import json
+import datetime
+import requests
+from redash.utils import JSONEncoder
+
+
+def graphite(connection_params):
+    def transform_result(response):
+        columns = [{'name': 'Time::x'}, {'name': 'value::y'}, {'name': 'name::series'}]
+        rows = []
+        for series in response.json():
+            for values in series['datapoints']:
+                timestamp = datetime.datetime.fromtimestamp(int(values[1]))
+                rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})
+
+        data = {'columns': columns, 'rows': rows}
+        return json.dumps(data, cls=JSONEncoder)
+
+    def query_runner(query):
+        base_url = "%s/render?format=json&" % connection_params['url']
+        url = "%s%s" % (base_url, "&".join(query.split("\n")))
+        error = None
+        data = None
+
+        try:
+            response = requests.get(url, auth=connection_params['auth'],
+                                    verify=connection_params['verify'])
+
+            if response.status_code == 200:
+                data = transform_result(response)
+            else:
+                error = "Failed getting results (%d)" % response.status_code
+        except Exception, ex:
+            data = None
+            error = ex.message
+
+        return data, error
+
+    query_runner.annotate_query = False
+
+    return query_runner
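
For readers of the new runner: the query text is treated as newline-separated Graphite render parameters, which are joined with '&' onto the /render endpoint of the configured URL. A rough illustration of the behaviour with made-up values (the target name, timestamps and numbers are not from the diff):

    # One render parameter per line in the query editor...
    query = "target=stats.web.requests\nfrom=-1h"

    # ...becomes a single GET request:
    #   https://graphite.yourcompany.com/render?format=json&target=stats.web.requests&from=-1h
    #
    # Each (value, timestamp) pair of every returned series is emitted as a row, e.g.:
    #   {'Time::x': datetime.datetime(2014, 2, 11, 11, 0), 'name::series': 'stats.web.requests', 'value::y': 17.0}

The ::x, ::y and ::series suffixes in the column names appear to be the naming convention the chart visualization uses to map columns to axes and series.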

View File

@@ -234,8 +234,11 @@ class Worker(threading.Thread):
             start_time = time.time()
             self.set_title("running query %s" % job_id)
 
-            annotated_query = "/* Pid: %s, Job Id: %s, Query hash: %s, Priority: %s */ %s" % \
-                              (pid, job.id, job.query_hash, job.priority, job.query)
+            if getattr(self.query_runner, 'annotate_query', True):
+                annotated_query = "/* Pid: %s, Job Id: %s, Query hash: %s, Priority: %s */ %s" % \
+                                  (pid, job.id, job.query_hash, job.priority, job.query)
+            else:
+                annotated_query = job.query
 
             # TODO: here's the part that needs to be forked, not all of the worker process...
             data, error = self.query_runner(annotated_query)
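
A short note on the annotate_query flag introduced here: runners that set annotate_query = False (as the new Graphite runner does) receive job.query untouched, because the /* ... */ prefix is an SQL comment and would corrupt a Graphite render URL. A tiny standalone sketch of the same decision (build_query is a hypothetical helper, not part of the codebase; the prefix values are illustrative):

    def build_query(query_runner, raw_query, prefix="/* Pid: 1234, Job Id: 56 */ "):
        # Mirrors the Worker logic: only annotate runners that accept SQL comments.
        if getattr(query_runner, 'annotate_query', True):
            return prefix + raw_query
        return raw_query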

View File

@@ -3,11 +3,12 @@ Example settings module. You should make your own copy as settings.py and enter
 """
 REDIS_URL = "redis://localhost:6379"
 
-# Either "pg" or "mysql"
+# "pg", "graphite" or "mysql"
 CONNECTION_ADAPTER = "pg"
-# Connection string for the database that is used to run queries against
-# -- example mysql CONNECTION_STRING = "Server=;User=;Pwd=;Database="
-# -- example pg CONNECTION_STRING = "user= password= host= port=5439 dbname="
+# Connection string for the database that is used to run queries against. Examples:
+# -- mysql: CONNECTION_STRING = "Server=;User=;Pwd=;Database="
+# -- pg: CONNECTION_STRING = "user= password= host= port=5439 dbname="
+# -- graphite: CONNECTION_STRING = {'url': 'https://graphite.yourcompany.com', 'auth': ('user', 'password'), 'verify': True}
 CONNECTION_STRING = "user= password= host= port=5439 dbname="
 # Connection settings for re:dash's own database (where we store the queries, results, etc)
 DATABASE_CONFIG = {