mirror of https://github.com/valitydev/redash.git (synced 2024-11-06 00:55:16 +00:00)

feat: New support databend for redash (#5902)

* feat: New support databend for redash
* fix

This commit is contained in:
parent c08ef9b502
commit 8487876e7f
@@ -52,6 +52,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
   - Exasol
   - Microsoft Excel
   - Firebolt
+  - Databend
   - Google Analytics
   - Google BigQuery
   - Google Spreadsheets
redash/query_runner/databend.py (new file, 118 lines)

@@ -0,0 +1,118 @@
try:
    from databend_sqlalchemy import connector
    import re

    enabled = True
except ImportError:
    enabled = False

from redash.query_runner import BaseQueryRunner, register
from redash.query_runner import TYPE_STRING, TYPE_INTEGER, TYPE_BOOLEAN, TYPE_FLOAT, TYPE_DATETIME, TYPE_DATE
from redash.utils import json_dumps, json_loads


class Databend(BaseQueryRunner):
    noop_query = "SELECT 1"

    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {
                "host": {"type": "string", "default": "localhost"},
                "port": {"type": "int", "default": 8000},
                "username": {"type": "string"},
                "password": {"type": "string", "default": ""},
                "database": {"type": "string"},
                "secure": {"type": "string", "default": False},
            },
            "order": ["username", "password", "host", "port", "database"],
            "required": ["username", "database"],
            "secret": ["password"],
        }
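    # Illustrative only, not part of the committed file: a configuration dict
    # that satisfies the schema above (all values are placeholders).
    #
    #   {"host": "localhost", "port": 8000, "username": "root",
    #    "password": "", "database": "default", "secure": False}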
    @classmethod
    def name(cls):
        return "Databend"

    @classmethod
    def type(cls):
        return "Databend"

    @classmethod
    def enabled(cls):
        return enabled

    @staticmethod
    def _define_column_type(column_type):
        c = column_type.lower()
        # Unwrap Nullable(...) so the inner type drives the mapping.
        f = re.search(r"^nullable\((.*)\)$", c)
        if f is not None:
            c = f.group(1)
        if c.startswith("int") or c.startswith("uint"):
            return TYPE_INTEGER
        elif c.startswith("float"):
            return TYPE_FLOAT
        elif c == "datetime":
            return TYPE_DATETIME
        elif c == "date":
            return TYPE_DATE
        else:
            return TYPE_STRING
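    # Illustrative only, not part of the committed file: how the mapping above
    # resolves a few Databend column types.
    #
    #   "UInt32"            -> TYPE_INTEGER
    #   "Nullable(Float64)" -> TYPE_FLOAT
    #   "Date"              -> TYPE_DATE
    #   "String"            -> TYPE_STRING (fallback for unmatched types)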
    def run_query(self, query, user):
        host = self.configuration.get("host") or "localhost"
        port = self.configuration.get("port") or 8000
        username = self.configuration.get("username") or None
        password = self.configuration.get("password") or None
        database = self.configuration.get("database") or None
        secure = self.configuration.get("secure") or False
        # Build the DSN and open a connection for this query run.
        connection = connector.connect(f"databend://{username}:{password}@{host}:{port}/{database}?secure={secure}")
        cursor = connection.cursor()

        try:
            cursor.execute(query)
            columns = self.fetch_columns(
                [(i[0], self._define_column_type(i[1])) for i in cursor.description]
            )
            rows = [
                dict(zip((column["name"] for column in columns), row)) for row in cursor
            ]

            data = {"columns": columns, "rows": rows}
            error = None
            json_data = json_dumps(data)
        finally:
            connection.close()

        return json_data, error
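    # Illustrative only, not part of the committed file: for a query like
    # "SELECT 1 AS one", the JSON string returned above has roughly this shape:
    #
    #   {"columns": [{"name": "one", "friendly_name": "one", "type": "integer"}],
    #    "rows": [{"one": 1}]}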
    def _get_tables(self):
        query = """
        SELECT TABLE_SCHEMA,
               TABLE_NAME,
               COLUMN_NAME
        FROM INFORMATION_SCHEMA.COLUMNS
        WHERE TABLE_SCHEMA NOT IN ('information_schema', 'system')
        """

        results, error = self.run_query(query, None)

        if error is not None:
            self._handle_run_query_error(error)

        schema = {}
        results = json_loads(results)

        # Group columns by "<schema>.<table>" so each table appears once.
        for row in results["rows"]:
            table_name = "{}.{}".format(row["table_schema"], row["table_name"])

            if table_name not in schema:
                schema[table_name] = {"name": table_name, "columns": []}

            schema[table_name]["columns"].append(row["column_name"])

        return list(schema.values())


register(Databend)
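For reference, a minimal sketch of how the new runner could be exercised end to end. It is not part of the commit, assumes databend-sqlalchemy is installed and a Databend server is reachable, and every configuration value below is a placeholder:

    from redash.query_runner.databend import Databend

    runner = Databend(
        {
            "host": "localhost",
            "port": 8000,
            "username": "root",
            "password": "",
            "database": "default",
            "secure": False,
        }
    )
    json_data, error = runner.run_query("SELECT 1 AS one", None)
    if error is None:
        print(json_data)  # JSON string with "columns" and "rows" keys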
@@ -390,6 +390,7 @@ default_query_runners = [
     "redash.query_runner.excel",
     "redash.query_runner.csv",
     "redash.query_runner.firebolt",
+    "redash.query_runner.databend",
     "redash.query_runner.nz",
     "redash.query_runner.arango"
 ]
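Listing the module here means it is imported when Redash loads its default query runners, which runs the register(Databend) call above. A quick sanity-check sketch, assuming the import_query_runners helper and the query_runners registry exposed by redash.query_runner, plus an installed databend-sqlalchemy driver:

    from redash import query_runner

    query_runner.import_query_runners(["redash.query_runner.databend"])
    # The registry key comes from Databend.type(), i.e. "Databend".
    print(query_runner.query_runners.get("Databend"))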
@@ -41,6 +41,7 @@ cmem-cmempy==21.2.3
 xlrd==2.0.1
 openpyxl==3.0.7
 firebolt-sdk
+databend-sqlalchemy
 pandas==1.3.4
 nzpy>=1.15
 nzalchemy
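The new dependency can also be smoke-tested on its own, outside Redash. A minimal sketch, assuming a Databend server on localhost:8000, placeholder credentials, and a DB-API style cursor (the same connector.connect DSN form used in databend.py above):

    from databend_sqlalchemy import connector

    connection = connector.connect("databend://root:@localhost:8000/default?secure=False")
    cursor = connection.cursor()
    cursor.execute("SELECT 1")
    print(cursor.fetchall())
    connection.close()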