Use our own version of the cursor class to take care of duplicate names in the column description.
Ashesh Vashi 2016-07-16 10:42:11 +05:30
parent d15dfac60f
commit 9ba6bafb2b
4 changed files with 283 additions and 61 deletions
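
For context, the problem this commit addresses can be reproduced with a plain psycopg2 cursor: when a query returns two columns with the same name, both description entries report that name, so any dictionary keyed by column name silently drops one of the values. A minimal sketch, assuming a reachable PostgreSQL server and standard psycopg2 (the connection string below is a placeholder):

    import psycopg2

    # Placeholder DSN, for illustration only.
    conn = psycopg2.connect("dbname=postgres user=postgres")
    cur = conn.cursor()
    cur.execute("SELECT 1 AS a, 2 AS a")

    # Both description entries carry the same name ...
    print([d[0] for d in cur.description])                              # ['a', 'a']
    # ... so a dict keyed by column name keeps only one of the two values.
    print({d[0]: v for d, v in zip(cur.description, cur.fetchone())})   # {'a': 2}

The DictCursor added by this commit renames the later duplicates (e.g. 'a-1') and keeps the original label in a separate 'display_name' field for the UI.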


@@ -410,7 +410,8 @@ def poll(trans_id):
         # rollback to cleanup
         if isinstance(trans_obj, QueryToolCommand):
             trans_status = conn.transaction_status()
-            if trans_status == TX_STATUS_INERROR and trans_obj.auto_rollback:
+            if (trans_status == TX_STATUS_INERROR and
+                    trans_obj.auto_rollback):
                 conn.execute_void("ROLLBACK;")
     elif status == ASYNC_EXECUTION_ABORTED:
         status = 'Cancel'
@@ -426,10 +427,11 @@ def poll(trans_id):
             rows_affected = conn.rows_affected()

             for col in col_info:
+                items = list(col.items())
                 col_type = dict()
-                col_type['type_code'] = col[1]
+                col_type['type_code'] = items[1][1]
                 col_type['type_name'] = None
-                columns[col[0]] = col_type
+                columns[items[0][1]] = col_type

         # As we changed the transaction object we need to
         # restore it and update the session variable.
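
With this change the entries in col_info are no longer bare psycopg2 description tuples but the OrderedDicts produced by the new cursor's to_dict() (see the cursor module at the end of this commit), so the column name and type code are read from the key/value pairs instead of by position. A rough sketch of what a single entry looks like and what the indexing above picks out (the values are illustrative):

    from collections import OrderedDict

    # Illustrative entry for the second of two columns the query named "a";
    # 'name' holds the de-duplicated dummy name, 'display_name' the original.
    col = OrderedDict([
        ('name', 'a-1'), ('type_code', 23), ('display_size', None),
        ('internal_size', 4), ('precision', None), ('scale', None),
        ('null_ok', None), ('display_name', 'a'),
    ])

    items = list(col.items())
    print(items[0][1])   # 'a-1' -> used as the key into `columns`
    print(items[1][1])   # 23   -> the column's type code (type OID)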
@@ -440,9 +442,9 @@ def poll(trans_id):
         result = conn.status_message()
         additional_result = conn.messages()
         """
-        'Procedure/Function output may comes in the form of Notices from the
-        database server, so we need to append those outputs with the original
-        result.
+        Procedure/Function output may comes in the form of Notices from the
+        database server, so we need to append those outputs with the
+        original result.
         """
         if isinstance(additional_result, list) \
                 and len(additional_result) > 0:
@@ -450,9 +452,13 @@ def poll(trans_id):

             rows_affected = conn.rows_affected()

-    return make_json_response(data={'status': status, 'result': result,
-                                    'colinfo': col_info, 'primary_keys': primary_keys,
-                                    'rows_affected': rows_affected})
+    return make_json_response(
+        data={
+            'status': status, 'result': result,
+            'colinfo': col_info, 'primary_keys': primary_keys,
+            'rows_affected': rows_affected
+        }
+    )


 @blueprint.route('/fetch/types/<int:trans_id>', methods=["GET"])


@@ -1523,7 +1523,7 @@ define(
               column_type = document.createElement('span'),
               col_label = '',
               col_type = '';
-          label_text.innerText = c.name;
+          label_text.innerText = c.display_name;

           var type = pg_types[c.type_code] ?
                   pg_types[c.type_code][0] :


@@ -30,6 +30,7 @@ import config
 from pgadmin.model import Server, User
 from .keywords import ScanKeyword
 from ..abstract import BaseDriver, BaseConnection
+from .cursor import DictCursor


 _ = gettext
@@ -63,12 +64,10 @@ def register_date_typecasters(connection):
         return value

     cursor = connection.cursor()
-    cursor.execute('SELECT NULL::date')
+    cursor.execute('SELECT NULL::date, NULL::timestamp, NULL::timestamptz')
     date_oid = cursor.description[0][1]
-    cursor.execute('SELECT NULL::timestamp')
-    timestamp_oid = cursor.description[0][1]
-    cursor.execute('SELECT NULL::timestamptz')
-    timestamptz_oid = cursor.description[0][1]
+    timestamp_oid = cursor.description[1][1]
+    timestamptz_oid = cursor.description[2][1]
     oids = (date_oid, timestamp_oid, timestamptz_oid)
     new_type = psycopg2.extensions.new_type(oids, 'DATE', cast_date)
     psycopg2.extensions.register_type(new_type)
@@ -415,7 +414,7 @@ Attempt to reconnect failed with the error:
             return False, msg

         try:
-            cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
+            cur = self.conn.cursor(cursor_factory=DictCursor)
         except psycopg2.Error as pe:
             errmsg = gettext("""
 Failed to create cursor for psycopg2 connection with error message for the \
@@ -475,7 +474,8 @@ Attempt to reconnect it failed with the error:
             return False, str(cur)

         query_id = random.randint(1, 9999999)
-        current_app.logger.log(25,
+        current_app.logger.log(
+            25,
             "Execute (scalar) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}".format(
                 server_id=self.manager.sid,
                 conn_id=self.conn_id,
@@ -486,6 +486,7 @@ Attempt to reconnect it failed with the error:
         try:
             self.__internal_blocking_execute(cur, query, params)
         except psycopg2.Error as pe:
+            current_app.logger.exception(pe)
             cur.close()
             errmsg = self._formatted_exception_msg(pe, formatted_exception_msg)
             current_app.logger.error(
@@ -523,9 +524,9 @@ Attempt to reconnect it failed with the error:
             return False, str(cur)

         query_id = random.randint(1, 9999999)
-        current_app.logger.log(25, """
-Execute (async) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}
-""".format(
+        current_app.logger.log(
+            25,
+            "Execute (async) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}".format(
                 server_id=self.manager.sid,
                 conn_id=self.conn_id,
                 query=query,
@@ -573,9 +574,9 @@ Failed to execute query (execute_async) for the server #{server_id} - {conn_id}
             return False, str(cur)

         query_id = random.randint(1, 9999999)
-        current_app.logger.log(25, """
-Execute (void) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}
-""".format(
+        current_app.logger.log(
+            25,
+            "Execute (void) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}".format(
                 server_id=self.manager.sid,
                 conn_id=self.conn_id,
                 query=query,
@@ -613,7 +614,8 @@ Failed to execute query (execute_void) for the server #{server_id} - {conn_id}
             return False, str(cur)

         query_id = random.randint(1, 9999999)
-        current_app.logger.log(25,
+        current_app.logger.log(
+            25,
             "Execute (2darray) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}".format(
                 server_id=self.manager.sid,
                 conn_id=self.conn_id,
@@ -637,10 +639,9 @@ Failed to execute query (execute_void) for the server #{server_id} - {conn_id}
             )
             return False, errmsg

-        import copy
         # Get Resultset Column Name, Type and size
         columns = cur.description and [
-            copy.deepcopy(desc._asdict()) for desc in cur.description
+            desc.to_dict() for desc in cur.ordered_description()
         ] or []

         rows = []
@@ -658,7 +659,8 @@ Failed to execute query (execute_void) for the server #{server_id} - {conn_id}
         if not status:
             return False, str(cur)
         query_id = random.randint(1, 9999999)
-        current_app.logger.log(25,
+        current_app.logger.log(
+            25,
             "Execute (dict) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}".format(
                 server_id=self.manager.sid,
                 conn_id=self.conn_id,
@@ -681,10 +683,9 @@ Failed to execute query (execute_void) for the server #{server_id} - {conn_id}
             )
             return False, errmsg

-        import copy
         # Get Resultset Column Name, Type and size
         columns = cur.description and [
-            copy.deepcopy(desc._asdict()) for desc in cur.description
+            desc.to_dict() for desc in cur.ordered_description()
         ] or []

         rows = []
@@ -873,7 +874,9 @@ Failed to reset the connection to the server due to following error:

         # Fetch the column information
         if cur.description is not None:
-            colinfo = [desc for desc in cur.description]
+            colinfo = [
+                desc.to_dict() for desc in cur.ordered_description()
+            ]

         self.row_count = cur.rowcount
         if cur.rowcount > 0:


@@ -0,0 +1,213 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

"""
Implementation of an extended cursor, which returns an ordered dictionary
when fetching results from it, and also takes care of duplicate column names
in the result.
"""

try:
    from collections import OrderedDict
except ImportError:
    from ordereddict import OrderedDict

from psycopg2.extensions import cursor as _cursor

class _WrapperColumn(object):
    """
    class _WrapperColumn(object)

    A wrapper class around an individual description column object, which
    allows identifying the duplicate column names created implicitly by the
    PostgreSQL database server during query execution.

    Methods:
    -------
    * __init__(_col, _name)
      - Initialize the wrapper around the description column object, which
        will present the dummy name (when available) instead of the duplicate
        name.

    * __getattribute__(name)
      - Get attributes from the original column description (which is a named
        tuple), except for the few attributes (i.e. orig_col, dummy_name,
        __class__, to_dict) that belong to this wrapper object.

    * __getitem__(idx)
      - Get the item from the original object, except for the 0th index item,
        which is the 'name'.

    * __setitem__(idx, value)
    * __delitem__(idx)
      - Overridden to perform the operations on the original object.

    * to_dict()
      - Converts the original object's data to an OrderedDict, in which the
        name is replaced by the dummy name (if available), and the original
        name is kept under an additional 'display_name' key.
    """

    def __init__(self, _col, _name):
        """Initializer for _WrapperColumn"""
        self.orig_col = _col
        self.dummy_name = _name

    def __getattribute__(self, name):
        """Get the attributes from the original object (except a few)."""
        if (name == 'orig_col' or name == 'dummy_name' or
                name == '__class__' or name == 'to_dict'):
            return object.__getattribute__(self, name)
        elif name == 'name':
            res = object.__getattribute__(self, 'dummy_name')
            if res is not None:
                return res
        return self.orig_col.__getattribute__(name)

    def __getitem__(self, idx):
        """Overrides __getitem__ to fetch the item from the original object."""
        if idx == 0 and self.dummy_name is not None:
            return self.name
        return self.orig_col.__getitem__(idx)

    def __setitem__(self, *args, **kwargs):
        """Overrides __setitem__ to perform the operation on the original object."""
        return self.orig_col.__setitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        """Overrides __delitem__ to perform the operation on the original object."""
        return self.orig_col.__delitem__(*args, **kwargs)

    def to_dict(self):
        """
        Generates an OrderedDict from the fields of the original object,
        avoiding the duplicate name.
        """
        ores = OrderedDict(self.orig_col._asdict())
        name = ores['name']

        if self.dummy_name:
            ores['name'] = self.dummy_name
        ores['display_name'] = name

        return ores

class DictCursor(_cursor):
    """
    DictCursor

    A cursor class that generates a dictionary from each result tuple, and
    also takes care of duplicate column names in the result description.

    Methods:
    -------
    * __init__()
      - Initialize the cursor object.

    * _dict_tuple(tuple)
      - Generate a dictionary object from a tuple, based on the column
        description.

    * _ordered_description()
      - Generates the _WrapperColumn objects from the description columns,
        and identifies duplicate column names.
    """

    def __init__(self, *args, **kwargs):
        """
        Initialize the cursor object.
        """
        self._odt_desc = None
        _cursor.__init__(self, *args, **kwargs)

    def _dict_tuple(self, tup):
        """
        Transform the tuple into a dictionary object.
        """
        if self._odt_desc is None:
            self._ordered_description()
        return {k[0]: v for k, v in zip(self._odt_desc, tup)}

    def _ordered_description(self):
        """
        Transform the regular description into wrapper objects, which handle
        duplicate column names.
        """
        self._odt_desc = _cursor.__getattribute__(self, 'description')
        desc = self._odt_desc

        if desc is None or len(desc) == 0:
            return

        res = list()
        od = {d[0]: 0 for d in desc}

        for d in desc:
            dummy = None
            idx = od[d.name]
            if idx == 0:
                od[d.name] = 1
            else:
                name = ("%s-%s" % (d.name, idx))
                while name in od:
                    idx += 1
                    name = ("%s-%s" % (d.name, idx))
                od[d.name] = idx
                dummy = name
            res.append(_WrapperColumn(d, dummy))
        self._odt_desc = tuple(res)

    def ordered_description(self):
        """
        Use this to fetch the description.
        """
        if self._odt_desc is None:
            self._ordered_description()
        return self._odt_desc

    def execute(self, query, params=None):
        """
        Execute a query, resetting the cached ordered description.
        """
        self._odt_desc = None
        return _cursor.execute(self, query, params)

    def executemany(self, query, params=None):
        """
        executemany of the regular cursor, resetting the cached ordered
        description.
        """
        self._odt_desc = None
        return _cursor.executemany(self, query, params)

    def callproc(self, proname, params=None):
        """
        Call a procedure by name, resetting the cached ordered description.
        """
        self._odt_desc = None
        return _cursor.callproc(self, proname, params)

    def fetchmany(self, size=None):
        """
        Fetch many tuples as a list of dictionaries.
        """
        tuples = _cursor.fetchmany(self, size)
        if tuples is not None:
            return [self._dict_tuple(t) for t in tuples]
        return None

    def fetchall(self):
        """
        Fetch all tuples as a list of dictionaries.
        """
        tuples = _cursor.fetchall(self)
        if tuples is not None:
            return [self._dict_tuple(t) for t in tuples]

    def __iter__(self):
        it = _cursor.__iter__(self)
        yield self._dict_tuple(next(it))
        while 1:
            yield self._dict_tuple(next(it))
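
Finally, a short sketch of how the new cursor is expected to behave once it is used as the cursor_factory (assuming a reachable server, a psycopg2 whose description entries are named tuples, and that the module path matches this commit's driver package; the DSN is a placeholder):

    import psycopg2
    from pgadmin.utils.driver.psycopg2.cursor import DictCursor

    conn = psycopg2.connect("dbname=postgres user=postgres")
    cur = conn.cursor(cursor_factory=DictCursor)
    cur.execute("SELECT 1 AS a, 2 AS a, 3 AS b")

    # The duplicate column gets a dummy name, while to_dict() keeps the
    # original label under 'display_name' for the grid header.
    print([d.name for d in cur.ordered_description()])
    # ['a', 'a-1', 'b']
    print([d.to_dict()['display_name'] for d in cur.ordered_description()])
    # ['a', 'a', 'b']

    # fetchall() returns dictionaries keyed by the de-duplicated names,
    # so no value is silently lost.
    print(cur.fetchall())
    # [{'a': 1, 'a-1': 2, 'b': 3}]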