Commit df85392bbf (parent 155f82676d), committed by Dave Page.
@@ -574,7 +574,8 @@ def poll(trans_id):
                 'client_primary_key': client_primary_key,
                 'has_oids': has_oids,
                 'oids': oids
             }
-        }
+        },
+        encoding=conn.python_encoding
     )

@@ -646,7 +647,8 @@ def fetch(trans_id, fetch_all=None):
                 'has_more_rows': has_more_rows,
                 'rows_fetched_from': rows_fetched_from,
                 'rows_fetched_to': rows_fetched_to
             }
-        }
+        },
+        encoding=conn.python_encoding
     )

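For context (not part of the commit), a minimal self-contained sketch in plain Python of the round trip these two hunks enable: bytes from a SQL_ASCII database are mapped losslessly into a Python str, serialized to JSON, and mapped back.

import json

# Bytes as they might come from a SQL_ASCII database: \255 (octal) = 0xAD,
# which is not valid UTF-8 on its own.
db_bytes = b'caf\xad'

# Lossless one-byte-to-one-code-point mapping, the behaviour the commit
# selects for SQL_ASCII connections via python_encoding = 'raw_unicode_escape'.
as_text = db_bytes.decode('raw_unicode_escape')

# json.dumps \u-escapes the non-ASCII code point, so the payload is safe.
payload = json.dumps({'value': as_text})

# The client (or a later write) can recover the original bytes exactly.
restored = json.loads(payload)['value'].encode('raw_unicode_escape')
assert restored == db_bytes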
web/pgadmin/tools/sqleditor/tests/test_encoding_charset.py (new file, 113 lines)
@@ -0,0 +1,113 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from regression import parent_node_dict
from regression.python_test_utils import test_utils
import json
from pgadmin.utils import server_utils


class TestEncodingCharset(BaseTestGenerator):
    """
    This class validates character support in pgAdmin4 for
    different PostgreSQL database encodings
    """
    skip_on_database = ['gpdb']
    scenarios = [
        (
            'With Encoding UTF8',
            dict(
                db_encoding='UTF8',
                lc_collate='C',
                test_str='A'
            )),
        (
            'With Encoding WIN1252',
            dict(
                db_encoding='WIN1252',
                lc_collate='C',
                test_str='A'
            )),
        (
            'With Encoding EUC_CN',
            dict(
                db_encoding='EUC_CN',
                lc_collate='C',
                test_str='A'
            )),
        (
            'With Encoding SQL_ASCII',
            dict(
                db_encoding='SQL_ASCII',
                lc_collate='C',
                test_str='\\255'
            )),
    ]

    def setUp(self):
        self.encode_db_name = 'encoding_' + self.db_encoding
        self.encode_sid = self.server_information['server_id']

        server_con = server_utils.connect_server(self, self.encode_sid)
        if hasattr(self, 'skip_on_database'):
            if 'data' in server_con and 'type' in server_con['data']:
                if server_con['data']['type'] in self.skip_on_database:
                    self.skipTest('cannot run in: %s' %
                                  server_con['data']['type'])

        self.encode_did = test_utils.create_database(
            self.server, self.encode_db_name,
            (self.db_encoding, self.lc_collate))

    def runTest(self):
        db_con = database_utils.connect_database(self,
                                                 test_utils.SERVER_GROUP,
                                                 self.encode_sid,
                                                 self.encode_did)
        if not db_con["info"] == "Database connected.":
            raise Exception("Could not connect to the database.")

        # Initialize query tool
        url = '/datagrid/initialize/query_tool/{0}/{1}/{2}'.format(
            test_utils.SERVER_GROUP, self.encode_sid, self.encode_did)
        response = self.tester.post(url)
        self.assertEquals(response.status_code, 200)

        response_data = json.loads(response.data.decode('utf-8'))
        self.trans_id = response_data['data']['gridTransId']

        # Check character
        url = "/sqleditor/query_tool/start/{0}".format(self.trans_id)
        sql = "select E'{0}';".format(self.test_str)
        response = self.tester.post(url, data=json.dumps({"sql": sql}),
                                    content_type='html/json')
        self.assertEquals(response.status_code, 200)
        url = '/sqleditor/poll/{0}'.format(self.trans_id)
        response = self.tester.get(url)
        self.assertEquals(response.status_code, 200)
        response_data = json.loads(response.data.decode('utf-8'))
        self.assertEquals(response_data['data']['rows_fetched_to'], 1)

        database_utils.disconnect_database(self, self.encode_sid,
                                           self.encode_did)

    def tearDown(self):
        main_conn = test_utils.get_db_connection(
            self.server['db'],
            self.server['username'],
            self.server['db_password'],
            self.server['host'],
            self.server['port'],
            self.server['sslmode']
        )
        test_utils.drop_database(main_conn, self.encode_db_name)
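An illustrative sketch only (the real mechanics live in pgadmin.utils.route, which BaseTestGenerator builds on): each (name, attrs) pair in scenarios becomes a separate test run whose dict keys become instance attributes (self.db_encoding, self.test_str, ...). Note that E'\255' in the SQL_ASCII scenario asks the server for the raw byte 0xAD, which would break a UTF-8-only pipeline.

scenarios = [
    ('With Encoding UTF8',
     dict(db_encoding='UTF8', lc_collate='C', test_str='A')),
    ('With Encoding SQL_ASCII',
     dict(db_encoding='SQL_ASCII', lc_collate='C', test_str='\\255')),
]

for name, attrs in scenarios:
    # Shows the exact SQL each generated test sends to the query tool.
    print(name, "-> select E'%s';" % attrs['test_str'])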
@@ -45,7 +45,8 @@ def get_no_cache_header():


 def make_json_response(
-        success=1, errormsg='', info='', result=None, data=None, status=200
+        success=1, errormsg='', info='', result=None, data=None, status=200,
+        encoding='utf-8'
 ):
     """Create a HTML response document describing the results of a request and
     containing the data."""
@@ -58,7 +59,7 @@ def make_json_response(

     return Response(
         response=json.dumps(doc, cls=DataTypeJSONEncoder,
-                            separators=(',', ':')),
+                            separators=(',', ':'), encoding=encoding),
         status=status,
         mimetype="application/json",
         headers=get_no_cache_header()
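A note on the new encoding parameter: it is forwarded to json.dumps, whose encoding keyword exists in Python 2 (it tells the encoder how to decode any byte strings in the payload) and was removed in Python 3. A minimal sketch of the equivalent explicit step, assuming Python 3 and a hypothetical helper name:

import json

def dumps_with_encoding(doc, encoding='utf-8'):
    # Hypothetical helper: decode any bytes values with the caller-supplied
    # encoding before serializing -- the explicit Python 3 equivalent of
    # Python 2's json.dumps(..., encoding=...).
    def _decode(obj):
        if isinstance(obj, bytes):
            return obj.decode(encoding)
        if isinstance(obj, dict):
            return {k: _decode(v) for k, v in obj.items()}
        if isinstance(obj, list):
            return [_decode(v) for v in obj]
        return obj
    return json.dumps(_decode(doc), separators=(',', ':'))

print(dumps_with_encoding({'value': b'\xad'}, encoding='raw_unicode_escape'))
# {"value":"\u00ad"}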
@@ -50,7 +50,6 @@ else:

 _ = gettext

-
 # Register global type caster which will be applicable to all connections.
 register_global_typecasters()

||||
@@ -398,10 +397,29 @@ class Connection(BaseConnection):
|
||||
if self.use_binary_placeholder:
|
||||
register_binary_typecasters(self.conn)
|
||||
|
||||
status = _execute(cur, "SET DateStyle=ISO;"
|
||||
"SET client_min_messages=notice;"
|
||||
"SET bytea_output=escape;"
|
||||
"SET client_encoding='UNICODE';")
|
||||
if self.conn.encoding in ('SQL_ASCII', 'SQLASCII',
|
||||
'MULE_INTERNAL', 'MULEINTERNAL'):
|
||||
status = _execute(cur, "SET DateStyle=ISO;"
|
||||
"SET client_min_messages=notice;"
|
||||
"SET bytea_output=escape;"
|
||||
"SET client_encoding='{0}';"
|
||||
.format(self.conn.encoding))
|
||||
self.python_encoding = 'raw_unicode_escape'
|
||||
else:
|
||||
status = _execute(cur, "SET DateStyle=ISO;"
|
||||
"SET client_min_messages=notice;"
|
||||
"SET bytea_output=escape;"
|
||||
"SET client_encoding='UNICODE';")
|
||||
self.python_encoding = 'utf-8'
|
||||
|
||||
# Replace the python encoding for original name and renamed encodings
|
||||
# psycopg2 removes the underscore in conn.encoding
|
||||
# Setting the encodings dict value will only help for select statements
|
||||
# because for parameterized DML, param values are converted based on
|
||||
# python encoding of pyscopg2s internal encodings dict.
|
||||
for key, val in encodings.items():
|
||||
if key.replace('_', '') == self.conn.encoding:
|
||||
encodings[key] = self.python_encoding
|
||||
|
||||
if status is not None:
|
||||
self.conn.close()
|
||||
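The loop above mutates psycopg2's module-level codec table. A small sketch of the same idea, runnable with just psycopg2 installed (no server needed); the encoding names are illustrative:

from psycopg2.extensions import encodings

# What psycopg2 reports for a SQL_ASCII server (underscores stripped).
conn_encoding = 'SQLASCII'
python_encoding = 'raw_unicode_escape'

# Patch every spelling of the key so later lookups decode with the
# byte-preserving codec instead of plain ascii.
for key in list(encodings):
    if key.replace('_', '') == conn_encoding:
        encodings[key] = python_encoding
        print(key, '->', encodings[key])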
@@ -599,6 +617,21 @@ WHERE

         return True, cur

+    def escape_params_sqlascii(self, params):
+        # The data is unescaped using string_typecasters when selected.
+        # We need to escape the data so that it does not fail when
+        # it is encoded with python ascii;
+        # unicode_escape helps in escaping and unescaping.
+        if self.conn.encoding in ('SQL_ASCII', 'SQLASCII',
+                                  'MULE_INTERNAL', 'MULEINTERNAL')\
+                and params is not None and type(params) == dict:
+            params = {
+                key: val.encode('unicode_escape')
+                .decode('raw_unicode_escape')
+                for key, val in params.items()
+            }
+        return params
+
     def __internal_blocking_execute(self, cur, query, params):
         """
         This function executes the query using cursor's execute function,
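A self-contained illustration in plain Python of what escape_params_sqlascii does to a single value:

# A value as produced by the non_ascii_escape typecaster for SQL_ASCII:
# unicode_escape decoding has already turned raw bytes into text.
value = 'caf\xad'

# Re-escape it so encoding with a strict codec cannot fail; the literal
# backslash sequence survives the trip back to the server unchanged.
escaped = value.encode('unicode_escape').decode('raw_unicode_escape')
print(escaped)              # caf\xad -- 'caf' plus the four chars \, x, a, d
assert escaped == 'caf\\xad'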
@@ -618,6 +651,7 @@ WHERE
         else:
             query = query.encode('utf-8')

+        params = self.escape_params_sqlascii(params)
         cur.execute(query, params)
         if self.async == 1:
             self._wait(cur.connection)
@@ -735,7 +769,7 @@ WHERE

         header = []
         json_columns = []
-        conn_encoding = cur.connection.encoding
+        conn_encoding = encodings[cur.connection.encoding]

         for c in cur.ordered_description():
             # This is to handle the case in which column name is non-ascii
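Here encodings is psycopg2's name-to-codec table (imported from psycopg2.extensions), so the change resolves the server encoding name to a Python codec before it is used for decoding. A quick illustration, assuming psycopg2 is installed:

from psycopg2.extensions import encodings

# Map psycopg2's reported connection encoding to a Python codec name,
# usable with str.encode()/bytes.decode().
print(encodings['UTF8'])    # 'utf_8' in current psycopg2 releases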
@@ -881,6 +915,9 @@ WHERE
         else:
             query = query.encode('utf-8')

+        # Convert the params based on python_encoding
+        params = self.escape_params_sqlascii(params)
+
         self.__async_cursor = None
         status, cur = self.__cursor()
@@ -164,46 +164,37 @@ def register_global_typecasters():


 def register_string_typecasters(connection):
-    if connection.encoding != 'UTF8':
-        # In python3 when database encoding is other than utf-8 and client
-        # encoding is set to UNICODE then we need to map data from database
-        # encoding to utf-8.
-        # This is required because when client encoding is set to UNICODE then
-        # psycopg assumes database encoding utf-8 and not the actual encoding.
-        # Not sure whether it's bug or feature in psycopg for python3.
+    # raw_unicode_escape used for SQL ASCII will escape the
+    # characters. Here we unescape them using unicode_escape
+    # and send ahead. When insert update is done, the characters
+    # are escaped again and sent to the DB.
+    if connection.encoding in ('SQL_ASCII', 'SQLASCII',
+                               'MULE_INTERNAL', 'MULEINTERNAL'):
         if sys.version_info >= (3,):
-            def return_as_unicode(value, cursor):
+            def non_ascii_escape(value, cursor):
                 if value is None:
                     return None
-                # Treat value as byte sequence of database encoding and then
-                # decode it as utf-8 to get correct unicode value.
                 return bytes(
                     value, encodings[cursor.connection.encoding]
-                ).decode('utf-8')
-
-            unicode_type = psycopg2.extensions.new_type(
-                # "char", name, text, character, character varying
-                (19, 18, 25, 1042, 1043, 0),
-                'UNICODE', return_as_unicode)
+                ).decode('unicode_escape')
         else:
-            def return_as_unicode(value, cursor):
+            def non_ascii_escape(value, cursor):
                 if value is None:
                     return None
-                # Decode it as utf-8 to get correct unicode value.
-                return value.decode('utf-8')
-
-            unicode_type = psycopg2.extensions.new_type(
-                # "char", name, text, character, character varying
-                (19, 18, 25, 1042, 1043, 0),
-                'UNICODE', return_as_unicode)
+                return value.decode('unicode_escape')
+
+        unicode_type = psycopg2.extensions.new_type(
+            # "char", name, text, character, character varying
+            (19, 18, 25, 1042, 1043, 0),
+            'UNICODE', non_ascii_escape)

         unicode_array_type = psycopg2.extensions.new_array_type(
             # "char"[], name[], text[], character[], character varying[]
             (1002, 1003, 1009, 1014, 1015, 0
              ), 'UNICODEARRAY', unicode_type)

-        psycopg2.extensions.register_type(unicode_type)
-        psycopg2.extensions.register_type(unicode_array_type)
+        psycopg2.extensions.register_type(unicode_type, connection)
+        psycopg2.extensions.register_type(unicode_array_type, connection)


 def register_binary_typecasters(connection):
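For readers unfamiliar with psycopg2 typecasters, a minimal standalone sketch of the mechanism used above; the DSN is a placeholder and the OID tuple covers only the text type:

import psycopg2
from psycopg2 import extensions

def shout(value, cursor):
    # Typecasters receive the raw string from the wire (or None).
    return None if value is None else value.upper()

# 25 is the OID of the text type; 'SHOUT' is just a label.
SHOUT = extensions.new_type((25,), 'SHOUT', shout)

conn = psycopg2.connect('dbname=test user=postgres')  # placeholder DSN
extensions.register_type(SHOUT, conn)  # per-connection, like the change above

cur = conn.cursor()
cur.execute("SELECT 'hello'::text")
print(cur.fetchone()[0])               # HELLO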