Re-hash the way that we handle rendering of special types such as arrays. Fixes #2782. Fixes #2822.

This commit is contained in:
Harshal Dhumal 2017-12-13 11:53:27 +00:00 committed by Dave Page
parent 0c5a5ba9c2
commit 55254a649f
9 changed files with 294 additions and 265 deletions
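The gist of the change: rather than fetching PostgreSQL arrays as Python lists and re-assembling them in the client-side formatters and editors, the driver can now register an array-to-string typecaster per connection, so array columns arrive as the literal text the server produced. A minimal sketch of the before/after, assuming a scratch database reachable with the placeholder DSN below and assuming the new module is importable as pgadmin.utils.driver.psycopg2.typecast:

import psycopg2

# Assumed import path for the typecast module introduced by this commit.
from pgadmin.utils.driver.psycopg2.typecast import (
    register_array_to_string_typecasters,
)

# Placeholder DSN -- point it at any scratch database.
conn = psycopg2.connect('dbname=test user=postgres')

with conn.cursor() as cur:
    cur.execute("SELECT ARRAY['data', NULL, '', 'x']::text[]")
    print(cur.fetchone()[0])   # psycopg2 default: ['data', None, '', 'x']

# Per-connection caster added by this commit: supported array types are now
# returned exactly as the server's array literal.
register_array_to_string_typecasters(conn)

with conn.cursor() as cur:
    cur.execute("SELECT ARRAY['data', NULL, '', 'x']::text[]")
    print(cur.fetchone()[0])   # plain string: '{data,NULL,"",x}'

The grid and editors then display and edit that string as ordinary text, which is why most of the array-specific SlickGrid formatters and editors below go away.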

View File

@@ -16,11 +16,11 @@
"13": ["", "false", "bool"],
"14": ["", "[null]", "text[]"],
"15": ["{}", "{}", "text[]"],
"16": ["{data,,'',\"\",\\'\\',\\\"\\\"}", "{data,[null],,,'',\"\"}", "text[]"],
"16": ["{data,NULL,'',\"\"}", "{data,NULL,'',\"\"}", "text[]"],
"17": ["{}", "{}", "int[]"],
"18": ["{123,,456}", "{123,[null],456}", "int[]"],
"18": ["{123,,456}", "{123,NULL,456}", "int[]"],
"19": ["", "[null]", "boolean[]"],
"20": ["{false,,true}", "{false,[null],true}", "boolean[]"]
"20": ["{false,null,true}", "{f,NULL,t}", "boolean[]"]
}
}
}

View File

@@ -210,28 +210,13 @@
$input.select();
}
} else {
var data = [];
for (var k in item[args.column.field]) {
if (_.isUndefined(item[args.column.field][k]) || _.isNull(item[args.column.field][k])) {
data.push('');
} else if (item[args.column.field][k] === "") {
data.push("''");
} else if (item[args.column.field][k] === "''") {
data.push("\\'\\'");
} else if (item[args.column.field][k] === '""') {
data.push('\\"\\"');
} else {
data.push(item[args.column.field][k]);
$input.select();
}
}
defaultValue = data;
$input.val('{' + data.join() +'}');
$input.val(defaultValue = item[args.column.field]);
$input.select();
}
};
this.serializeValue = function () {
var value = $input.val();
// If empty return null
if (value === "") {
@@ -249,31 +234,7 @@
return value;
}
} else {
// Remove leading { and trailing }.
// Also remove leading and trailing whitespaces.
var value = $.trim(value.slice(1, -1));
if(value == '') {
return [];
}
var data = [];
value = value.split(',');
for (var k in value) {
if (value[k] == "") {
data.push(null); //empty string from editor is null value.
} else if (value[k] === "''" || value[k] === '""') {
data.push(''); // double quote from editor is blank string;
} else if (value[k] === "\\'\\'") {
data.push("''");
} else if (value[k] === '\\"\\"') {
data.push('""');
} else {
data.push(value[k]);
}
}
return data;
return $.trim(value);
}
};
@@ -943,14 +904,16 @@
};
this.serializeValue = function () {
if ($input.val() === "") {
var value = $input.val();
if (value === "") {
return null;
}
if(args.column.is_array) {
// Remove leading { and trailing }.
// Also remove leading and trailing whitespaces.
var val = $.trim($input.val().slice(1, -1));
var val = $.trim(value.slice(1, -1));
if(val == '') {
return [];
@@ -964,7 +927,7 @@
return val;
}
return $input.val();
return value;
};
this.applyValue = function (item, state) {

View File

@@ -15,9 +15,6 @@
"Checkmark": CheckmarkFormatter,
"Text": TextFormatter,
"Binary": BinaryFormatter,
"JsonStringArray": JsonArrayFormatter,
"NumbersArray": NumbersArrayFormatter,
"TextArray": TextArrayFormatter,
}
}
});
@@ -73,36 +70,6 @@
}
}
function JsonArrayFormatter(row, cell, value, columnDef, dataContext) {
// If column has default value, set placeholder
var data = NullAndDefaultFormatter(row, cell, value, columnDef, dataContext);
if (data) {
return data;
} else {
var data = [];
for (var k in value) {
// Stringify only if it's json object
var v = value[k];
if (typeof v === "object" && !Array.isArray(v)) {
return data.push(_.escape(JSON.stringify(v)));
} else if (Array.isArray(v)) {
var temp = [];
$.each(v, function(i, val) {
if (typeof val === "object") {
temp.push(JSON.stringify(val));
} else {
temp.push(val)
}
});
return data.push(_.escape("[" + temp.join() + "]"));
} else {
return data.push(_.escape(v));
}
}
return '{' + data.join() + '}';
}
}
function NumbersFormatter(row, cell, value, columnDef, dataContext) {
// If column has default value, set placeholder
var data = NullAndDefaultNumberFormatter(row, cell, value, columnDef, dataContext);
@@ -113,24 +80,6 @@
}
}
function NumbersArrayFormatter(row, cell, value, columnDef, dataContext) {
// If column has default value, set placeholder
var data = NullAndDefaultNumberFormatter(row, cell, value, columnDef, dataContext);
if (data) {
return data;
} else {
data = [];
for(var k in value) {
if (value[k] == null) {
data.push("<span class='disabled_cell'>[null]</span>");
} else {
data.push(_.escape(value[k]));
}
}
return "<span style='float:right'>{" + data.join() + "}</span>";
}
}
function CheckmarkFormatter(row, cell, value, columnDef, dataContext) {
/* Checkbox has 3 states
* 1) checked=true
@@ -155,24 +104,6 @@
}
}
function TextArrayFormatter(row, cell, value, columnDef, dataContext) {
// If column has default value, set placeholder
var data = NullAndDefaultFormatter(row, cell, value, columnDef, dataContext);
if (data) {
return data;
} else {
data = [];
for(var k in value) {
if (value[k] === null) {
data.push("<span class='disabled_cell'>[null]</span>");
} else {
data.push(_.escape(value[k]));
}
}
return "{" + data.join() + "}";
}
}
function BinaryFormatter(row, cell, value, columnDef, dataContext) {
// If column has default value, set placeholder
var data = NullAndDefaultFormatter(row, cell, value, columnDef, dataContext);

View File

@@ -119,7 +119,8 @@ def initialize_datagrid(cmd_type, obj_type, sid, did, obj_id):
try:
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
conn = manager.connection(did=did, conn_id=conn_id,
use_binary_placeholder=True)
use_binary_placeholder=True,
array_to_string=True)
except Exception as e:
return internal_server_error(errormsg=str(e))
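On the pgAdmin side the behaviour is opt-in per connection through the new array_to_string flag, alongside the existing use_binary_placeholder flag. A sketch of the call as the data grid makes it; the ids are placeholder values and the imports are the ones pgAdmin modules normally use:

from config import PG_DEFAULT_DRIVER
from pgadmin.utils.driver import get_driver

# Placeholder ids -- in the real view they come from the URL and session.
sid, did, conn_id = 1, 13090, 'DATAGRID-1'

manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
conn = manager.connection(did=did, conn_id=conn_id,
                          use_binary_placeholder=True,
                          array_to_string=True)

# With array_to_string=True the connection registers the array-to-string
# typecasters when it connects, so e.g. an int[] value reaches the grid as
# the string '{123,NULL,456}' rather than as a Python list.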

View File

@@ -379,7 +379,8 @@ def check_transaction_status(trans_id):
try:
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid)
conn = manager.connection(did=trans_obj.did, conn_id=trans_obj.conn_id,
use_binary_placeholder=True)
use_binary_placeholder=True,
array_to_string=True)
except Exception as e:
return False, internal_server_error(errormsg=str(e)), None, None, None
@@ -526,7 +527,8 @@ def start_query_tool(trans_id):
try:
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid)
conn = manager.connection(did=trans_obj.did, conn_id=conn_id,
use_binary_placeholder=True)
use_binary_placeholder=True,
array_to_string=True)
except Exception as e:
return internal_server_error(errormsg=str(e))
@@ -641,7 +643,6 @@ def preferences(trans_id):
return success_return()
@blueprint.route('/poll/<int:trans_id>', methods=["GET"], endpoint='poll')
@login_required
def poll(trans_id):

View File

@@ -605,24 +605,24 @@ define('tools.querytool', [
else if (c.cell == 'Json') {
options['editor'] = is_editable ? Slick.Editors.JsonText
: Slick.Editors.ReadOnlyJsonText;
options['formatter'] = c.is_array ? Slick.Formatters.JsonStringArray : Slick.Formatters.JsonString;
options['formatter'] = Slick.Formatters.JsonString;
} else if (c.cell == 'number' ||
$.inArray(c.type, ['oid', 'xid', 'real']) !== -1
) {
options['editor'] = is_editable ? Slick.Editors.CustomNumber
: Slick.Editors.ReadOnlyText;
options['formatter'] = c.is_array ? Slick.Formatters.NumbersArray : Slick.Formatters.Numbers;
options['formatter'] = Slick.Formatters.Numbers;
} else if (c.cell == 'boolean') {
options['editor'] = is_editable ? Slick.Editors.Checkbox
: Slick.Editors.ReadOnlyCheckbox;
options['formatter'] = c.is_array ? Slick.Formatters.CheckmarkArray : Slick.Formatters.Checkmark;
options['formatter'] = Slick.Formatters.Checkmark;
} else if (c.cell == 'binary') {
// We do not support editing binary data in SQL editor and data grid.
options['formatter'] = Slick.Formatters.Binary;
}else {
options['editor'] = is_editable ? Slick.Editors.pgText
: Slick.Editors.ReadOnlypgText;
options['formatter'] = c.is_array ? Slick.Formatters.TextArray : Slick.Formatters.Text;
options['formatter'] = Slick.Formatters.Text;
}
grid_columns.push(options)

View File

@@ -21,7 +21,6 @@ import sys
import simplejson as json
import psycopg2
import psycopg2.extras
from flask import g, current_app, session
from flask_babel import gettext
from flask_security import current_user
@@ -34,14 +33,16 @@ from pgadmin.utils.exception import ConnectionLost
from .keywords import ScanKeyword
from ..abstract import BaseDriver, BaseConnection
from .cursor import DictCursor
from .typecast import register_global_typecasters, register_string_typecasters,\
register_binary_typecasters, register_array_to_string_typecasters,\
ALL_JSON_TYPES
if sys.version_info < (3,):
# Python2 in-built csv module do not handle unicode
# backports.csv module ported from PY3 csv module for unicode handling
from backports import csv
from StringIO import StringIO
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
IS_PY2 = True
else:
from io import StringIO
@@ -50,134 +51,9 @@ else:
_ = gettext
unicode_type_for_record = psycopg2.extensions.new_type(
(2249,),
"RECORD",
psycopg2.extensions.UNICODE
)
unicode_array_type_for_record_array = psycopg2.extensions.new_array_type(
(2287,),
"ARRAY_RECORD",
unicode_type_for_record
)
# This registers a unicode type caster for datatype 'RECORD'.
psycopg2.extensions.register_type(unicode_type_for_record)
# This registers a array unicode type caster for datatype 'ARRAY_RECORD'.
psycopg2.extensions.register_type(unicode_array_type_for_record_array)
# define type caster to convert various pg types into string type
pg_types_to_string_type = psycopg2.extensions.new_type(
(
# To cast bytea, interval type
17, 1186,
# to cast int4range, int8range, numrange tsrange, tstzrange, daterange
3904, 3926, 3906, 3908, 3910, 3912, 3913,
# date, timestamp, timestamptz, bigint, double precision
1700, 1082, 1114, 1184, 20, 701,
# real, time without time zone
700, 1083, 1183
),
'TYPECAST_TO_STRING', psycopg2.STRING
)
# define type caster to convert pg array types of above types into
# array of string type
pg_array_types_to_array_of_string_type = psycopg2.extensions.new_array_type(
(
# To cast bytea[] type
1001,
# bigint[]
1016,
# double precision[], real[]
1022, 1021
),
'TYPECAST_TO_ARRAY_OF_STRING', pg_types_to_string_type
)
# This registers a type caster to convert various pg types into string type
psycopg2.extensions.register_type(pg_types_to_string_type)
# This registers a type caster to convert various pg array types into
# array of string type
psycopg2.extensions.register_type(pg_array_types_to_array_of_string_type)
def register_string_typecasters(connection):
if connection.encoding != 'UTF8':
# In python3 when database encoding is other than utf-8 and client
# encoding is set to UNICODE then we need to map data from database
# encoding to utf-8.
# This is required because when client encoding is set to UNICODE then
# psycopg assumes database encoding utf-8 and not the actual encoding.
# Not sure whether it's bug or feature in psycopg for python3.
if sys.version_info >= (3,):
def return_as_unicode(value, cursor):
if value is None:
return None
# Treat value as byte sequence of database encoding and then
# decode it as utf-8 to get correct unicode value.
return bytes(
value, encodings[cursor.connection.encoding]
).decode('utf-8')
unicode_type = psycopg2.extensions.new_type(
# "char", name, text, character, character varying
(19, 18, 25, 1042, 1043, 0),
'UNICODE', return_as_unicode)
else:
def return_as_unicode(value, cursor):
if value is None:
return None
# Decode it as utf-8 to get correct unicode value.
return value.decode('utf-8')
unicode_type = psycopg2.extensions.new_type(
# "char", name, text, character, character varying
(19, 18, 25, 1042, 1043, 0),
'UNICODE', return_as_unicode)
unicode_array_type = psycopg2.extensions.new_array_type(
# "char"[], name[], text[], character[], character varying[]
(1002, 1003, 1009, 1014, 1015, 0
), 'UNICODEARRAY', unicode_type)
psycopg2.extensions.register_type(unicode_type)
psycopg2.extensions.register_type(unicode_array_type)
def register_binary_typecasters(connection):
psycopg2.extensions.register_type(
psycopg2.extensions.new_type(
(
# To cast bytea type
17,
),
'BYTEA_PLACEHOLDER',
# Only show placeholder if data actually exists.
lambda value, cursor: 'binary data' if value is not None else None),
connection
)
psycopg2.extensions.register_type(
psycopg2.extensions.new_type(
(
# To cast bytea[] type
1001,
),
'BYTEA_ARRAY_PLACEHOLDER',
# Only show placeholder if data actually exists.
lambda value, cursor: 'binary data[]' if value is not None else None),
connection
)
# Register global type caster which will be applicable to all connections.
register_global_typecasters()
class Connection(BaseConnection):
@@ -262,7 +138,7 @@ class Connection(BaseConnection):
"""
def __init__(self, manager, conn_id, db, auto_reconnect=True, async=0,
use_binary_placeholder=False):
use_binary_placeholder=False, array_to_string=False):
assert (manager is not None)
assert (conn_id is not None)
@@ -284,6 +160,7 @@
# This flag indicates the connection reconnecting status.
self.reconnecting = False
self.use_binary_placeholder = use_binary_placeholder
self.array_to_string = array_to_string
super(Connection, self).__init__()
@@ -302,6 +179,7 @@
res['async'] = self.async
res['wasConnected'] = self.wasConnected
res['use_binary_placeholder'] = self.use_binary_placeholder
res['array_to_string'] = self.array_to_string
return res
@@ -469,6 +347,11 @@ Failed to connect to the database server(#{server_id}) for connection ({conn_id}
register_string_typecasters(self.conn)
if self.array_to_string:
register_array_to_string_typecasters(self.conn)
# Register type casters for binary data only after registering array to
# string type casters.
if self.use_binary_placeholder:
register_binary_typecasters(self.conn)
@@ -799,15 +682,13 @@ WHERE
json_columns = []
conn_encoding = cur.connection.encoding
# json, jsonb, json[], jsonb[]
json_types = (114, 199, 3802, 3807)
for c in cur.ordered_description():
# This is to handle the case in which column name is non-ascii
column_name = c.to_dict()['name']
if IS_PY2:
column_name = column_name.decode(conn_encoding)
header.append(column_name)
if c.to_dict()['type_code'] in json_types:
if c.to_dict()['type_code'] in ALL_JSON_TYPES:
json_columns.append(column_name)
if IS_PY2:
@@ -1801,7 +1682,7 @@ class ServerManager(object):
def connection(
self, database=None, conn_id=None, auto_reconnect=True, did=None,
async=None, use_binary_placeholder=False
async=None, use_binary_placeholder=False, array_to_string=False
):
if database is not None:
if hasattr(str, 'decode') and \
@@ -1856,7 +1737,8 @@ WHERE db.oid = {0}""".format(did))
async = 1 if async is True else 0
self.connections[my_id] = Connection(
self, my_id, database, auto_reconnect, async,
use_binary_placeholder=use_binary_placeholder
use_binary_placeholder=use_binary_placeholder,
array_to_string=array_to_string
)
return self.connections[my_id]
@@ -1886,7 +1768,8 @@ WHERE db.oid = {0}""".format(did))
conn = self.connections[conn_info['conn_id']] = Connection(
self, conn_info['conn_id'], conn_info['database'],
True, conn_info['async'],
use_binary_placeholder=conn_info['use_binary_placeholder']
use_binary_placeholder=conn_info['use_binary_placeholder'],
array_to_string=conn_info['array_to_string']
)
# only try to reconnect if connection was connected previously.
if conn_info['wasConnected']:
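The net effect on this file is that all module-level psycopg2 registration moves into the new typecast module: register_global_typecasters() runs once at import time, and the per-connection casters are applied right after connecting, with the bytea placeholders registered last so that they win for bytea and bytea[]. A condensed sketch of that ordering, with configure_connection() as a hypothetical stand-in for the relevant part of Connection.connect():

from pgadmin.utils.driver.psycopg2.typecast import (
    register_global_typecasters,
    register_string_typecasters,
    register_array_to_string_typecasters,
    register_binary_typecasters,
)

# Once per process: unicode/record casters plus the bigint, numeric, range,
# etc. casts to string that keep values JavaScript-safe.
register_global_typecasters()


def configure_connection(psycopg2_conn, array_to_string, use_binary_placeholder):
    # Per connection, in the order the driver uses:
    register_string_typecasters(psycopg2_conn)            # non-UTF8 databases
    if array_to_string:
        register_array_to_string_typecasters(psycopg2_conn)
    # Binary placeholders go last so they override the string casts for bytea.
    if use_binary_placeholder:
        register_binary_typecasters(psycopg2_conn)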

View File

@@ -72,7 +72,7 @@ class _WrapperColumn(object):
def __getitem__(self, idx):
"""Overrides __getitem__ to fetch item from original object"""
if idx == 0 and self.dummy_name is not None:
return self.name
return self.dummy_name
return self.orig_col.__getitem__(idx)
def __setitem__(self, *args, **kwargs):
@@ -200,7 +200,7 @@ class DictCursor(_cursor):
def fetchall(self):
"""
Fetch all tuples as orderd dictionary list.
Fetch all tuples as ordered dictionary list.
"""
tuples = _cursor.fetchall(self)
if tuples is not None:
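The __getitem__ change above is a one-line bug fix: index 0 of a psycopg2 column description is its name, and the wrapper's dummy_name exists to override that name, so index 0 must return dummy_name whenever one is set. A stripped-down stand-in (not the real class) showing the intended behaviour:

from collections import namedtuple

# Simplified stand-in for psycopg2's column description tuple.
Column = namedtuple('Column', ['name', 'type_code'])


class WrapperColumn(object):
    """Minimal illustration of the fixed __getitem__ behaviour."""

    def __init__(self, orig_col, dummy_name=None):
        self.orig_col = orig_col
        self.dummy_name = dummy_name

    def __getitem__(self, idx):
        # The substituted (dummy) name must win for index 0 when it is set.
        if idx == 0 and self.dummy_name is not None:
            return self.dummy_name
        return self.orig_col.__getitem__(idx)


col = WrapperColumn(Column('?column?', 25), dummy_name='col_1')
print(col[0])   # 'col_1', not '?column?'
print(col[1])   # 25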

View File

@@ -0,0 +1,250 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2017, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""
Typecast various data types so that they can be compatible with Javascript
data types.
"""
import sys
from psycopg2 import STRING as _STRING
import psycopg2
from psycopg2.extensions import encodings
# OIDs of data types which need to typecast as string to avoid JavaScript
# compatibility issues.
# e.g JavaScript does not support 64 bit integers. It has 64-bit double
# giving only 53 bits of integer range (IEEE 754)
# So to avoid loss of remaining 11 bits (64-53) we need to typecast bigint to
# string.
TO_STRING_DATATYPES = (
# To cast bytea, interval type
17, 1186,
# date, timestamp, timestamptz, bigint, double precision
1700, 1082, 1114, 1184, 20, 701,
# real, time without time zone
700, 1083
)
# OIDs of array data types which need to typecast to array of string.
# This list may contain:
# OIDs of data types from PSYCOPG_SUPPORTED_ARRAY_DATATYPES as they need to be
# typecast to array of string.
# Also OIDs of data types which psycopg2 does not typecast array of that
# data type. e.g: uuid, bit, varbit, etc.
TO_ARRAY_OF_STRING_DATATYPES = (
# To cast bytea[] type
1001,
# bigint[]
1016,
# double precision[], real[]
1022, 1021,
# bit[], varbit[]
1561, 1563,
)
# OID of record array data type
RECORD_ARRAY = (2287,)
# OIDs of builtin array datatypes supported by psycopg2
# OID reference psycopg2/psycopg/typecast_builtins.c
#
# For these array data types psycopg2 returns result in list.
# For all other array data types psycopg2 returns result as string (string
# representing array literal)
# e.g:
#
# For below two sql psycopg2 returns result in different formats.
# SELECT '{foo,bar}'::text[];
# print('type of {} ==> {}'.format(res[0], type(res[0])))
# SELECT '{<a>foo</a>,<b>bar</b>}'::xml[];
# print('type of {} ==> {}'.format(res[0], type(res[0])))
#
# Output:
# type of ['foo', 'bar'] ==> <type 'list'>
# type of {<a>foo</a>,<b>bar</b>} ==> <type 'str'>
PSYCOPG_SUPPORTED_BUILTIN_ARRAY_DATATYPES = (
1016, 1005, 1006, 1007, 1021, 1022, 1231,
1002, 1003, 1009, 1014, 1015, 1002, 1003,
1009, 1014, 1015, 1000, 1115, 1185, 1183,
1270, 1182, 1187, 1001, 1028, 1013, 1041,
651, 1040
)
# json, jsonb
# OID reference psycopg2/lib/_json.py
PSYCOPG_SUPPORTED_JSON_TYPES = (114, 3802)
# json[], jsonb[]
PSYCOPG_SUPPORTED_JSON_ARRAY_TYPES = (199, 3807)
ALL_JSON_TYPES = PSYCOPG_SUPPORTED_JSON_TYPES +\
PSYCOPG_SUPPORTED_JSON_ARRAY_TYPES
# INET[], CIDR[]
# OID reference psycopg2/lib/_ipaddress.py
PSYCOPG_SUPPORTED_IPADDRESS_ARRAY_TYPES = (1041, 651)
# uuid[]
# OID reference psycopg2/lib/extras.py
PSYCOPG_SUPPORTED_IPADDRESS_ARRAY_TYPES = (2951,)
# int4range, int8range, numrange, daterange tsrange, tstzrange[]
# OID reference psycopg2/lib/_range.py
PSYCOPG_SUPPORTED_RANGE_TYPES = (3904, 3926, 3906, 3912, 3908, 3910)
# int4range[], int8range[], numrange[], daterange[] tsrange[], tstzrange[]
# OID reference psycopg2/lib/_range.py
PSYCOPG_SUPPORTED_RANGE_ARRAY_TYPES = (3905, 3927, 3907, 3913, 3909, 3911)
def register_global_typecasters():
if sys.version_info < (3,):
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
unicode_type_for_record = psycopg2.extensions.new_type(
(2249,),
"RECORD",
psycopg2.extensions.UNICODE
)
unicode_array_type_for_record_array = psycopg2.extensions.new_array_type(
RECORD_ARRAY,
"ARRAY_RECORD",
unicode_type_for_record
)
# This registers a unicode type caster for datatype 'RECORD'.
psycopg2.extensions.register_type(unicode_type_for_record)
# This registers a array unicode type caster for datatype 'ARRAY_RECORD'.
psycopg2.extensions.register_type(unicode_array_type_for_record_array)
# define type caster to convert various pg types into string type
pg_types_to_string_type = psycopg2.extensions.new_type(
TO_STRING_DATATYPES + PSYCOPG_SUPPORTED_RANGE_TYPES,
'TYPECAST_TO_STRING', _STRING
)
# define type caster to convert pg array types of above types into
# array of string type
pg_array_types_to_array_of_string_type = psycopg2.extensions.new_array_type(
TO_ARRAY_OF_STRING_DATATYPES,
'TYPECAST_TO_ARRAY_OF_STRING', pg_types_to_string_type
)
# This registers a type caster to convert various pg types into string type
psycopg2.extensions.register_type(pg_types_to_string_type)
# This registers a type caster to convert various pg array types into
# array of string type
psycopg2.extensions.register_type(pg_array_types_to_array_of_string_type)
def register_string_typecasters(connection):
if connection.encoding != 'UTF8':
# In python3 when database encoding is other than utf-8 and client
# encoding is set to UNICODE then we need to map data from database
# encoding to utf-8.
# This is required because when client encoding is set to UNICODE then
# psycopg assumes database encoding utf-8 and not the actual encoding.
# Not sure whether it's bug or feature in psycopg for python3.
if sys.version_info >= (3,):
def return_as_unicode(value, cursor):
if value is None:
return None
# Treat value as byte sequence of database encoding and then
# decode it as utf-8 to get correct unicode value.
return bytes(
value, encodings[cursor.connection.encoding]
).decode('utf-8')
unicode_type = psycopg2.extensions.new_type(
# "char", name, text, character, character varying
(19, 18, 25, 1042, 1043, 0),
'UNICODE', return_as_unicode)
else:
def return_as_unicode(value, cursor):
if value is None:
return None
# Decode it as utf-8 to get correct unicode value.
return value.decode('utf-8')
unicode_type = psycopg2.extensions.new_type(
# "char", name, text, character, character varying
(19, 18, 25, 1042, 1043, 0),
'UNICODE', return_as_unicode)
unicode_array_type = psycopg2.extensions.new_array_type(
# "char"[], name[], text[], character[], character varying[]
(1002, 1003, 1009, 1014, 1015, 0
), 'UNICODEARRAY', unicode_type)
psycopg2.extensions.register_type(unicode_type)
psycopg2.extensions.register_type(unicode_array_type)
def register_binary_typecasters(connection):
psycopg2.extensions.register_type(
psycopg2.extensions.new_type(
(
# To cast bytea type
17,
),
'BYTEA_PLACEHOLDER',
# Only show placeholder if data actually exists.
lambda value, cursor: 'binary data' if value is not None else None),
connection
)
psycopg2.extensions.register_type(
psycopg2.extensions.new_type(
(
# To cast bytea[] type
1001,
),
'BYTEA_ARRAY_PLACEHOLDER',
# Only show placeholder if data actually exists.
lambda value, cursor: 'binary data[]' if value is not None else None),
connection
)
def register_array_to_string_typecasters(connection):
psycopg2.extensions.register_type(
psycopg2.extensions.new_type(
PSYCOPG_SUPPORTED_BUILTIN_ARRAY_DATATYPES +
PSYCOPG_SUPPORTED_JSON_ARRAY_TYPES +
PSYCOPG_SUPPORTED_IPADDRESS_ARRAY_TYPES +
PSYCOPG_SUPPORTED_RANGE_ARRAY_TYPES +
TO_ARRAY_OF_STRING_DATATYPES,
'ARRAY_TO_STRING',
_STRING),
connection
)
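The comment at the top of TO_STRING_DATATYPES is the crux of the module: JavaScript numbers are IEEE 754 doubles, so integers above 2^53 cannot be represented exactly, which is why bigint and friends are shipped to the browser as strings. A runnable sketch of the precision problem and of the kind of registration register_global_typecasters() performs, shown here for a single OID pair (20 = bigint, 1016 = bigint[]); no database connection is needed to register the casters:

import psycopg2
import psycopg2.extensions

# Python floats are the same 64-bit doubles JavaScript uses, so they make the
# point: values above 2**53 collapse onto the nearest representable double.
print(float(2 ** 53) == float(2 ** 53 + 1))   # True

# The remedy is to let psycopg2 hand such values over as strings.
bigint_as_string = psycopg2.extensions.new_type(
    (20,), 'BIGINT_AS_STRING', psycopg2.STRING
)
psycopg2.extensions.register_type(bigint_as_string)

# ...and the array counterpart, so a bigint[] column is fetched as a list of
# strings rather than a list of Python ints.
bigint_array_as_string = psycopg2.extensions.new_array_type(
    (1016,), 'BIGINT_ARRAY_AS_STRING', bigint_as_string
)
psycopg2.extensions.register_type(bigint_array_as_string)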