Fix handling of SQL_ASCII data in the query tool. Fixes #3289. Fixes #3250.

This commit is contained in:
Aditya Toshniwal
2018-06-21 08:26:01 -04:00
committed by Dave Page
parent 155f82676d
commit df85392bbf
6 changed files with 188 additions and 38 deletions

View File

@@ -45,7 +45,8 @@ def get_no_cache_header():
def make_json_response(
success=1, errormsg='', info='', result=None, data=None, status=200
success=1, errormsg='', info='', result=None, data=None, status=200,
encoding='utf-8'
):
"""Create an HTML response document describing the results of a request and
containing the data."""
@@ -58,7 +59,7 @@ def make_json_response(
return Response(
response=json.dumps(doc, cls=DataTypeJSONEncoder,
separators=(',', ':')),
separators=(',', ':'), encoding=encoding),
status=status,
mimetype="application/json",
headers=get_no_cache_header()

View File

@@ -50,7 +50,6 @@ else:
_ = gettext
# Register global type caster which will be applicable to all connections.
register_global_typecasters()
@@ -398,10 +397,29 @@ class Connection(BaseConnection):
if self.use_binary_placeholder:
register_binary_typecasters(self.conn)
status = _execute(cur, "SET DateStyle=ISO;"
"SET client_min_messages=notice;"
"SET bytea_output=escape;"
"SET client_encoding='UNICODE';")
if self.conn.encoding in ('SQL_ASCII', 'SQLASCII',
'MULE_INTERNAL', 'MULEINTERNAL'):
status = _execute(cur, "SET DateStyle=ISO;"
"SET client_min_messages=notice;"
"SET bytea_output=escape;"
"SET client_encoding='{0}';"
.format(self.conn.encoding))
self.python_encoding = 'raw_unicode_escape'
else:
status = _execute(cur, "SET DateStyle=ISO;"
"SET client_min_messages=notice;"
"SET bytea_output=escape;"
"SET client_encoding='UNICODE';")
self.python_encoding = 'utf-8'
# Replace the python encoding for original name and renamed encodings
# psycopg2 removes the underscore in conn.encoding
# Setting the encodings dict value will only help for select statements
# because for parameterized DML, param values are converted based on
# python encoding in psycopg2's internal encodings dict.
for key, val in encodings.items():
if key.replace('_', '') == self.conn.encoding:
encodings[key] = self.python_encoding
if status is not None:
self.conn.close()
@@ -599,6 +617,21 @@ WHERE
return True, cur
def escape_params_sqlascii(self, params):
    """
    Re-escape string parameter values for connections whose client
    encoding is SQL_ASCII or MULE_INTERNAL.

    The data is unescaped using string_typecasters when selected.
    We need to escape the data again here so that it does not fail
    when it is encoded with python ascii; unicode_escape helps in
    escaping, and raw_unicode_escape in unescaping.

    Args:
        params: query parameters (dict or None). Only dict string
            values are escaped; non-string values (and non-dict
            params) are passed through unchanged.

    Returns:
        The params with string values escaped when the connection
        encoding requires it, otherwise the params as given.
    """
    if self.conn.encoding in ('SQL_ASCII', 'SQLASCII',
                              'MULE_INTERNAL', 'MULEINTERNAL') \
            and isinstance(params, dict):
        # isinstance guard: numeric/None values have no .encode and
        # need no escaping — only str values can carry raw bytes.
        params = {
            key: (val.encode('unicode_escape')
                  .decode('raw_unicode_escape')
                  if isinstance(val, str) else val)
            for key, val in params.items()
        }
    return params
def __internal_blocking_execute(self, cur, query, params):
"""
This function executes the query using cursor's execute function,
@@ -618,6 +651,7 @@ WHERE
else:
query = query.encode('utf-8')
params = self.escape_params_sqlascii(params)
cur.execute(query, params)
if self.async == 1:
self._wait(cur.connection)
@@ -735,7 +769,7 @@ WHERE
header = []
json_columns = []
conn_encoding = cur.connection.encoding
conn_encoding = encodings[cur.connection.encoding]
for c in cur.ordered_description():
# This is to handle the case in which column name is non-ascii
@@ -881,6 +915,9 @@ WHERE
else:
query = query.encode('utf-8')
# Convert the params based on python_encoding
params = self.escape_params_sqlascii(params)
self.__async_cursor = None
status, cur = self.__cursor()

View File

@@ -164,46 +164,37 @@ def register_global_typecasters():
def register_string_typecasters(connection):
if connection.encoding != 'UTF8':
# In python3 when database encoding is other than utf-8 and client
# encoding is set to UNICODE then we need to map data from database
# encoding to utf-8.
# This is required because when client encoding is set to UNICODE then
# psycopg assumes database encoding utf-8 and not the actual encoding.
# Not sure whether it's bug or feature in psycopg for python3.
# raw_unicode_escape, used for SQL_ASCII, escapes the
# characters. Here we unescape them using unicode_escape
# and pass them on. When an insert or update is performed,
# the characters are escaped again and sent to the DB.
if connection.encoding in ('SQL_ASCII', 'SQLASCII',
'MULE_INTERNAL', 'MULEINTERNAL'):
if sys.version_info >= (3,):
def return_as_unicode(value, cursor):
def non_ascii_escape(value, cursor):
if value is None:
return None
# Treat value as byte sequence of database encoding and then
# decode it as utf-8 to get correct unicode value.
return bytes(
value, encodings[cursor.connection.encoding]
).decode('utf-8')
unicode_type = psycopg2.extensions.new_type(
# "char", name, text, character, character varying
(19, 18, 25, 1042, 1043, 0),
'UNICODE', return_as_unicode)
).decode('unicode_escape')
else:
def return_as_unicode(value, cursor):
def non_ascii_escape(value, cursor):
if value is None:
return None
# Decode it as utf-8 to get correct unicode value.
return value.decode('utf-8')
return value.decode('unicode_escape')
unicode_type = psycopg2.extensions.new_type(
# "char", name, text, character, character varying
(19, 18, 25, 1042, 1043, 0),
'UNICODE', return_as_unicode)
unicode_type = psycopg2.extensions.new_type(
# "char", name, text, character, character varying
(19, 18, 25, 1042, 1043, 0),
'UNICODE', non_ascii_escape)
unicode_array_type = psycopg2.extensions.new_array_type(
# "char"[], name[], text[], character[], character varying[]
(1002, 1003, 1009, 1014, 1015, 0
), 'UNICODEARRAY', unicode_type)
psycopg2.extensions.register_type(unicode_type)
psycopg2.extensions.register_type(unicode_array_type)
psycopg2.extensions.register_type(unicode_type, connection)
psycopg2.extensions.register_type(unicode_array_type, connection)
def register_binary_typecasters(connection):