Merge pull request #196 from EverythingMe/fixes

Bug fixes (#91, #195)
Arik Fraimovich 2014-05-01 17:56:34 +03:00
commit cddd7e909d
2 changed files with 41 additions and 11 deletions


@@ -205,19 +205,35 @@
     return this.columns;
 }
 
-QueryResult.prototype.getColumnCleanName = function (column) {
+QueryResult.prototype.getColumnNameWithoutType = function (column) {
     var parts = column.split('::');
-    var name = parts[1];
-    if (parts[0] != '') {
-        // TODO: it's probably time to generalize this.
-        // see also getColumnFriendlyName
-        name = parts[0].replace(/%/g, '__pct').replace(/ /g, '_').replace(/\?/g, '');
+    return parts[0];
+};
+
+var charConversionMap = {
+    '__pct': /%/g,
+    '_': / /g,
+    '__qm': /\?/g,
+    '__brkt': /[\(\)\[\]]/g,
+    '__dash': /-/g,
+    '__amp': /&/g
+};
+
+QueryResult.prototype.getColumnCleanName = function (column) {
+    var name = this.getColumnNameWithoutType(column);
+    if (name != '') {
+        _.each(charConversionMap, function(regex, replacement) {
+            name = name.replace(regex, replacement);
+        });
     }
     return name;
 }
 
 QueryResult.prototype.getColumnFriendlyName = function (column) {
-    return this.getColumnCleanName(column).replace('__pct', '%').replace(/_/g, ' ').replace(/(?:^|\s)\S/g, function (a) {
+    return this.getColumnNameWithoutType(column).replace(/(?:^|\s)\S/g, function (a) {
         return a.toUpperCase();
     });
 }
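
On the JavaScript side, the ad-hoc chain of replace() calls is swapped for a lookup table (charConversionMap) that maps each placeholder string to the regex it substitutes, and getColumnFriendlyName now title-cases the raw column name instead of round-tripping through the cleaned one. As a reading aid only, here is a minimal sketch of the same substitution idea, written in Python; clean_column_name and the sample column name are illustrative and not part of the commit:

    import re

    # Python transcription of charConversionMap from the diff above:
    # (placeholder, pattern it replaces). Illustration only.
    CHAR_CONVERSION_MAP = [
        ('__pct', re.compile(r'%')),
        ('_', re.compile(r' ')),
        ('__qm', re.compile(r'\?')),
        ('__brkt', re.compile(r'[\(\)\[\]]')),
        ('__dash', re.compile(r'-')),
        ('__amp', re.compile(r'&')),
    ]

    def clean_column_name(column):
        # Keep only the part before '::' (drop the type suffix), as getColumnNameWithoutType does.
        name = column.split('::')[0]
        for replacement, pattern in CHAR_CONVERSION_MAP:
            name = pattern.sub(replacement, name)
        return name

    print(clean_column_name('error rate?::float'))  # -> error_rate__qm

The sketch iterates an ordered list where the JavaScript version iterates an object with _.each; the result is the same either way, because no placeholder contains characters that another pattern matches.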


@@ -43,12 +43,26 @@ def pg(connection_string):
     cursor.execute(query)
     wait(connection)
 
-    column_names = [col.name for col in cursor.description]
+    column_names = set()
+    columns = []
+    duplicates_counter = 1
+
+    for column in cursor.description:
+        # TODO: this deduplication needs to be generalized and reused in all query runners.
+        column_name = column.name
+        if column_name in column_names:
+            column_name = column_name + str(duplicates_counter)
+            duplicates_counter += 1
+        column_names.add(column_name)
+
+        columns.append({
+            'name': column_name,
+            'friendly_name': column_friendly_name(column_name),
+            'type': None
+        })
+
     rows = [dict(zip(column_names, row)) for row in cursor]
-    columns = [{'name': col.name,
-                'friendly_name': column_friendly_name(col.name),
-                'type': None} for col in cursor.description]
 
     data = {'columns': columns, 'rows': rows}
     json_data = json.dumps(data, cls=JSONEncoder)
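
On the Python side, the pg query runner now makes duplicate column names unique before building the result set, so a query that returns two columns with the same name no longer collapses them into a single key. A small standalone sketch of the same deduplication rule, using a made-up list of names instead of a live cursor.description (dedup_column_names is illustrative, not part of the commit):

    def dedup_column_names(names):
        # Mirror the loop above: append a running counter to any name already seen.
        seen = set()
        result = []
        duplicates_counter = 1
        for name in names:
            if name in seen:
                name = name + str(duplicates_counter)
                duplicates_counter += 1
            seen.add(name)
            result.append(name)
        return result

    print(dedup_column_names(['count', 'count', 'count']))  # -> ['count', 'count1', 'count2']

As in the diff, the counter keeps incrementing across all duplicates rather than resetting per name, so repeated names receive distinct suffixes.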