Better handling of non-ASCII characters for different database objects.
Use 'psycopg2.extensions.UNICODE' (for Python < 3) in the psycopg2 driver for proper conversion of unicode characters. Also adjusted the string typecaster to take care of the different character types (char, character, text, name, character varying, and their array types). Reviewed by: Dave Page, Murtuza Zabuawala & Akshay Joshi
Parent: af29eac6ba
Commit: ffa8d94e76
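The change boils down to two things: registering psycopg2's built-in UNICODE typecasters on Python 2, and installing a custom string typecaster for the character-like types and their array types. The following is a minimal, self-contained sketch of that idea; the helper name, the connection-scoped registration, and the assumption of UTF-8 encoded data are illustrative rather than the exact pgAdmin implementation (the real version appears in the driver hunks at the end of this diff).

# Sketch only: registers unicode typecasters so char/name/text/character/
# character varying values (and their arrays) come back as unicode strings.
import sys
import psycopg2
import psycopg2.extensions

if sys.version_info < (3,):
    # Python 2: psycopg2 returns str (bytes) by default; the built-in
    # UNICODE/UNICODEARRAY casters make text values come back as unicode.
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)


def register_string_typecasters(connection):
    # OIDs for "char", name, text, character and character varying,
    # plus the matching array types.
    text_oids = (18, 19, 25, 1042, 1043)
    array_oids = (1002, 1003, 1009, 1014, 1015)

    def cast_to_unicode(value, cursor):
        # Assumes UTF-8 encoded data; None passes through unchanged.
        if value is None:
            return None
        return value.decode('utf-8') if isinstance(value, bytes) else value

    unicode_type = psycopg2.extensions.new_type(
        text_oids, 'UNICODE', cast_to_unicode)
    unicode_array_type = psycopg2.extensions.new_array_type(
        array_oids, 'UNICODEARRAY', unicode_type)

    # Passing the connection limits the casters to this connection only;
    # omit it to register them globally.
    psycopg2.extensions.register_type(unicode_type, connection)
    psycopg2.extensions.register_type(unicode_array_type, connection)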
@@ -194,8 +194,6 @@ class DatabaseView(PGChildNodeView):
for row in rset['rows']:
dbname = row['name']
if hasattr(str, 'decode'):
dbname = dbname.decode('utf-8')
if self.manager.db == dbname:
connected = True
canDrop = canDisConn = False

@@ -253,8 +251,6 @@ class DatabaseView(PGChildNodeView):
for row in rset['rows']:
db = row['name']
if hasattr(str, 'decode'):
db = db.decode('utf-8')
if self.manager.db == db:
connected = True
else:

@@ -905,9 +901,7 @@ class DatabaseView(PGChildNodeView):
frmtd_variables = parse_variables_from_db(res1['rows'])
result.update(frmtd_variables)
sql_header = "-- Database: {0}\n\n-- ".format(result['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header = u"-- Database: {0}\n\n-- ".format(result['name'])
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
@@ -502,18 +502,15 @@ class CastView(PGChildNodeView):
:return:
"""
data = request.args
try:
sql, name = self.get_sql(gid, sid, did, data, cid)
sql = sql.strip('\n').strip(' ')
if sql == '':
sql = "--modified SQL"
sql, name = self.get_sql(gid, sid, did, data, cid)
sql = sql.strip('\n').strip(' ')
if sql == '':
sql = "--modified SQL"
return make_json_response(
data=sql,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
return make_json_response(
data=sql,
status=200
)
def get_sql(self, gid, sid, did, data, cid=None):
"""

@@ -544,13 +541,13 @@ class CastView(PGChildNodeView):
"/".join([self.template_path, 'update.sql']),
data=data, o_data=old_data
)
return str(sql), data['name'] if 'name' in data else old_data['name']
return sql, data['name'] if 'name' in data else old_data['name']
else:
if 'srctyp' in data and 'trgtyp' in data:
sql = render_template("/".join([self.template_path, 'create.sql']), data=data, conn=self.conn)
else:
return "-- incomplete definition", None
return str(sql), data['srctyp'] + "->" + data["trgtyp"]
return u"-- incomplete definition", None
return sql, data['srctyp'] + "->" + data["trgtyp"]
@check_precondition
def get_functions(self, gid, sid, did, cid=None):
@@ -623,39 +623,33 @@ class EventTriggerView(PGChildNodeView):
Returns:
"""
try:
sql = render_template("/".join([self.template_path, 'properties.sql']), etid=etid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'properties.sql']), etid=etid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
result = res['rows'][0]
result = self._formatter(result)
result = res['rows'][0]
result = self._formatter(result)
sql = render_template("/".join([self.template_path, 'create.sql']), data=result, conn=self.conn)
sql += "\n\n"
sql += render_template("/".join([self.template_path, 'grant.sql']), data=result, conn=self.conn)
sql = render_template("/".join([self.template_path, 'create.sql']), data=result, conn=self.conn)
sql += "\n\n"
sql += render_template("/".join([self.template_path, 'grant.sql']), data=result, conn=self.conn)
db_sql = render_template("/".join([self.template_path, 'get_db.sql']), did=did)
status, db_name = self.conn.execute_scalar(db_sql)
if not status:
return internal_server_error(errormsg=db_name)
db_sql = render_template("/".join([self.template_path, 'get_db.sql']), did=did)
status, db_name = self.conn.execute_scalar(db_sql)
if not status:
return internal_server_error(errormsg=db_name)
sql_header = "-- Event Trigger: {0} on database {1}\n\n-- ".format(result['name'], db_name)
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header = u"-- Event Trigger: {0} on database {1}\n\n-- ".format(result['name'], db_name)
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
name=result['name'], )
sql_header += "\n"
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
name=result['name'], )
sql_header += "\n"
sql = sql_header + sql
sql = sql_header + sql
return ajax_response(response=sql)
except Exception as e:
return ajax_response(response=str(e))
return ajax_response(response=sql)
@check_precondition
def get_event_funcs(self, gid, sid, did, etid=None):
@@ -682,15 +682,12 @@ class ForeignDataWrapperView(PGChildNodeView):
)
sql += "\n"
sql_header = """-- Foreign Data Wrapper: {0}
sql_header = u"""-- Foreign Data Wrapper: {0}
-- DROP FOREIGN DATA WRAPPER {0}
""".format(res['rows'][0]['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql = sql_header + sql
return ajax_response(response=sql.strip('\n'))

@@ -703,13 +703,11 @@ class ForeignServerView(PGChildNodeView):
conn=self.conn)
sql += "\n"
sql_header = """-- Foreign Server: {0}
sql_header = u"""-- Foreign Server: {0}
-- DROP SERVER {0}
""".format(res['rows'][0]['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql = sql_header + sql

@@ -709,13 +709,11 @@ class UserMappingView(PGChildNodeView):
conn=self.conn)
sql += "\n"
sql_header = """-- User Mapping : {0}
sql_header = u"""-- User Mapping : {0}
-- DROP USER MAPPING FOR {0} SERVER {1}
""".format(res['rows'][0]['name'], fdw_data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql = sql_header + sql
@@ -805,9 +805,7 @@ It may have been removed by another user.
_=gettext, data=data, conn=self.conn
)
sql_header = "-- SCHEMA: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header = u"-- SCHEMA: {0}\n\n-- ".format(data['name'])
# drop schema
sql_header += render_template(

@@ -991,14 +989,12 @@ It may have been removed by another user.
_=gettext, data=old_data, conn=self.conn
)
sql_header = """
sql_header = u"""
-- CATALOG: {0}
-- DROP SCHEMA {0};(
""".format(old_data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
SQL = sql_header + SQL

@@ -650,9 +650,8 @@ class CollationView(PGChildNodeView):
'create.sql']),
data=data, conn=self.conn)
sql_header = "-- Collation: {0};\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header = u"-- Collation: {0};\n\n-- ".format(data['name'])
sql_header += render_template("/".join([self.template_path,
'delete.sql']),
name=data['name'])
@@ -699,14 +699,11 @@ AND relkind != 'c'))"""
SQL = render_template("/".join([self.template_path,
'create.sql']), data=data)
sql_header = """-- DOMAIN: {0}
sql_header = u"""-- DOMAIN: {0}
-- DROP DOMAIN {0};
""".format(data['basensp'] + '.' + data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
""".format(self.qtIdent(self.conn, data['basensp'], data['name']))
SQL = sql_header + SQL
return ajax_response(response=SQL.strip('\n'))

@@ -601,13 +601,12 @@ class DomainConstraintView(PGChildNodeView):
'create.sql']),
data=data, domain=domain, schema=schema)
sql_header = """-- CHECK: {1}.{0}
sql_header = u"""-- CHECK: {1}.{0}
-- ALTER DOMAIN {1} DROP CONSTRAINT {0};
""".format(data['name'], schema + '.' + domain)
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
""".format(self.qtIdent(self.conn, data['name']),
self.qtIdent(self.conn, schema, domain))
SQL = sql_header + SQL
@@ -821,13 +821,11 @@ shifted to the another schema.
SQL = render_template("/".join([self.template_path,
'create.sql']), data=data)
sql_header = """-- {0}: {1}
sql_header = u"""-- FOREIGN TABLE: {0}
-- DROP {0} {1};
-- DROP FOREIGN TABLE {0};
""".format('FOREIGN TABLE', data['basensp'] + "." + data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
""".format(self.qtIdent(self.conn, data['basensp'], data['name']))
SQL = sql_header + SQL

@@ -594,18 +594,16 @@ class FtsConfigurationView(PGChildNodeView):
data[k] = json.loads(v, encoding='utf-8')
except ValueError:
data[k] = v
try:
# Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, cfgid)
if SQL == '':
SQL = "-- No change"
return make_json_response(
data=SQL,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
# Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, cfgid)
if SQL == '':
SQL = "-- No change"
return make_json_response(
data=SQL,
status=200
)
def get_sql(self, gid, sid, did, scid, data, cfgid=None):
"""
@@ -668,7 +666,7 @@ class FtsConfigurationView(PGChildNodeView):
data=new_data, o_data=old_data
)
# Fetch sql query for modified data
return str(sql.strip('\n')), data['name'] if 'name' in data else old_data['name']
return sql.strip('\n'), data['name'] if 'name' in data else old_data['name']
else:
# Fetch schema name from schema oid
sql = render_template(

@@ -692,8 +690,8 @@ class FtsConfigurationView(PGChildNodeView):
conn=self.conn
)
else:
sql = "-- incomplete definition"
return str(sql.strip('\n')), data['name']
sql = u"-- incomplete definition"
return sql.strip('\n'), data['name']
@check_precondition
def parsers(self, gid, sid, did, scid):

@@ -579,18 +579,15 @@ class FtsDictionaryView(PGChildNodeView):
except ValueError:
data[k] = v
try:
# Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, dcid)
if SQL == '':
SQL = "--modified SQL"
# Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, dcid)
if SQL == '':
SQL = "--modified SQL"
return make_json_response(
data=SQL,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
return make_json_response(
data=SQL,
status=200
)
def get_sql(self, gid, sid, did, scid, data, dcid=None):
"""
@@ -653,7 +650,7 @@ class FtsDictionaryView(PGChildNodeView):
data=new_data, o_data=old_data
)
# Fetch sql query for modified data
return str(sql.strip('\n')), data['name'] if 'name' in data else old_data['name']
return sql.strip('\n'), data['name'] if 'name' in data else old_data['name']
else:
# Fetch schema name from schema oid
sql = render_template("/".join([self.template_path, 'schema.sql']),

@@ -676,8 +673,8 @@ class FtsDictionaryView(PGChildNodeView):
conn=self.conn
)
else:
sql = "-- incomplete definition"
return str(sql.strip('\n')), data['name']
sql = u"-- incomplete definition"
return sql.strip('\n'), data['name']
@check_precondition
def fetch_templates(self, gid, sid, did, scid):

@@ -5,9 +5,9 @@ SELECT
FROM
(
SELECT
E'-- Text Search Dictionary: ' || nspname || E'.' || dict.dictname ||
E'\n\n-- DROP TEXT SEARCH DICTIONARY ' || nspname || E'.' || dict.dictname ||
E'\n\nCREATE TEXT SEARCH DICTIONARY ' || nspname || E'.' || dict.dictname || E' (\n' ||
E'-- Text Search Dictionary: ' || quote_ident(nspname) || E'.' || quote_ident(dict.dictname) ||
E'\n\n-- DROP TEXT SEARCH DICTIONARY ' || quote_ident(nspname) || E'.' || quote_ident(dict.dictname) ||
E'\n\nCREATE TEXT SEARCH DICTIONARY ' || quote_ident(nspname) || E'.' || quote_ident(dict.dictname) || E' (\n' ||
E'\tTEMPLATE = ' || template ||
CASE
WHEN dict.dictinitoption IS NOT NULL THEN E',\n\t' || dict.dictinitoption

@@ -16,7 +16,7 @@ FROM
E'\n);' ||
CASE
WHEN description IS NOT NULL THEN
E'\n\nCOMMENT ON TEXT SEARCH DICTIONARY ' || nspname || E'.' || dict.dictname ||
E'\n\nCOMMENT ON TEXT SEARCH DICTIONARY ' || quote_ident(nspname) || E'.' || quote_ident(dict.dictname) ||
E' IS ' || pg_catalog.quote_literal(description) || E';'
ELSE '' END as sql
FROM
@@ -513,18 +513,16 @@ class FtsParserView(PGChildNodeView):
"""
data = request.args
# Fetch sql query for modified data
try:
# Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, pid)
if SQL == '':
SQL = "--modified SQL"
return make_json_response(
data=SQL,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
# Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, pid)
if SQL == '':
SQL = "--modified SQL"
return make_json_response(
data=SQL,
status=200
)
def get_sql(self, gid, sid, did, scid, data, pid=None):
"""

@@ -587,7 +585,7 @@ class FtsParserView(PGChildNodeView):
o_data=old_data
)
# Fetch sql query for modified data
return str(sql.strip('\n')), data['name'] if 'name' in data else old_data['name']
return sql.strip('\n'), data['name'] if 'name' in data else old_data['name']
else:
# Fetch schema name from schema oid
sql = render_template(

@@ -616,7 +614,7 @@ class FtsParserView(PGChildNodeView):
)
else:
sql = "-- incomplete definition"
return str(sql.strip('\n')), data['name']
return sql.strip('\n'), data['name']
@check_precondition
def start_functions(self, gid, sid, did, scid):
@@ -5,9 +5,9 @@ SELECT
FROM
(
SELECT
E'-- Text Search Parser: ' || nspname || E'.' || prs.prsname ||
E'\n\n-- DROP TEXT SEARCH PARSER ' || nspname || E'.' || prs.prsname ||
E'\n\nCREATE TEXT SEARCH PARSER ' || nspname || E'.' || prs.prsname || E' (\n' ||
E'-- Text Search Parser: ' || quote_ident(nspname) || E'.' || quote_ident(prs.prsname) ||
E'\n\n-- DROP TEXT SEARCH PARSER ' || quote_ident(nspname) || E'.' || quote_ident(prs.prsname) ||
E'\n\nCREATE TEXT SEARCH PARSER ' || quote_ident(nspname) || E'.' || quote_ident(prs.prsname) || E' (\n' ||
E' START = ' || prs.prsstart || E',\n' ||
E' GETTOKEN = ' || prs.prstoken || E',\n' ||
E' END = ' || prs.prsend || E',\n' ||

@@ -17,7 +17,7 @@ FROM
ELSE '' END || E'\n);' ||
CASE
WHEN description IS NOT NULL THEN
E'\n\nCOMMENT ON TEXT SEARCH PARSER ' || nspname || E'.' || prs.prsname ||
E'\n\nCOMMENT ON TEXT SEARCH PARSER ' || quote_ident(nspname) || E'.' || quote_ident(prs.prsname) ||
E' IS ' || pg_catalog.quote_literal(description) || E';'
ELSE '' END as sql
FROM

@@ -275,7 +275,7 @@ class FtsTemplateView(PGChildNodeView):
status=200
)
return gone(
_("Could not the requested FTS template.")
gettext("Could not the requested FTS template.")
)
@check_precondition

@@ -547,7 +547,7 @@ class FtsTemplateView(PGChildNodeView):
data=new_data, o_data=old_data
)
# Fetch sql query for modified data
return str(sql.strip('\n')), data['name'] if 'name' in data else old_data['name']
return sql.strip('\n'), data['name'] if 'name' in data else old_data['name']
else:
# Fetch schema name from schema oid
sql = render_template("/".join([self.template_path, 'schema.sql']),

@@ -570,8 +570,8 @@ class FtsTemplateView(PGChildNodeView):
conn=self.conn
)
else:
sql = "-- incomplete definition"
return str(sql.strip('\n')), data['name']
sql = u"-- incomplete definition"
return sql.strip('\n'), data['name']
@check_precondition
def get_lexize(self, gid, sid, did, scid, tid=None):
@@ -649,7 +649,7 @@ class FtsTemplateView(PGChildNodeView):
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(
_(
gettext(
"Could not generate reversed engineered Query for the FTS Template.\n{0}").format(
res
)

@@ -657,7 +657,7 @@ class FtsTemplateView(PGChildNodeView):
if res is None:
return gone(
_(
gettext(
"Could not generate reversed engineered Query for FTS Template node.")
)

@@ -4,16 +4,16 @@
FROM
(
SELECT
E'-- Text Search Template: ' || nspname || E'.' || tmpl.tmplname ||
E'\n\n-- DROP TEXT SEARCH TEMPLATE ' || nspname || E'.' || tmpl.tmplname ||
E'\n\nCREATE TEXT SEARCH TEMPLATE ' || nspname || E'.' || tmpl.tmplname || E' (\n' ||
E'-- Text Search Template: ' || quote_ident(nspname) || E'.' || quote_ident(tmpl.tmplname) ||
E'\n\n-- DROP TEXT SEARCH TEMPLATE ' || quote_ident(nspname) || E'.' || quote_ident(tmpl.tmplname) ||
E'\n\nCREATE TEXT SEARCH TEMPLATE ' || quote_ident(nspname) || E'.' || quote_ident(tmpl.tmplname) || E' (\n' ||
CASE
WHEN tmpl.tmplinit != '-'::regclass THEN E' INIT = ' || tmpl.tmplinit || E',\n'
ELSE '' END ||
E' LEXIZE = ' || tmpl.tmpllexize || E'\n);' ||
CASE
WHEN a.description IS NOT NULL THEN
E'\n\nCOMMENT ON TEXT SEARCH TEMPLATE ' || nspname || E'.' || tmpl.tmplname ||
E'\n\nCOMMENT ON TEXT SEARCH TEMPLATE ' || quote_ident(nspname) || E'.' || quote_ident(tmpl.tmplname) ||
E' IS ' || pg_catalog.quote_literal(description) || E';'
ELSE '' END as sql
FROM
@@ -955,13 +955,6 @@ class FunctionView(PGChildNodeView, DataTypeReader):
if 'acl' in resp_data:
resp_data['acl'] = parse_priv_to_db(resp_data['acl'], ['X'])
# generate function signature
header_func_name = '{0}.{1}({2})'.format(
resp_data['pronamespace'],
resp_data['proname'],
resp_data['proargtypenames']
)
# Generate sql for "SQL panel"
# func_def is procedure signature with default arguments
# query_for - To distinguish the type of call

@@ -982,13 +975,6 @@ class FunctionView(PGChildNodeView, DataTypeReader):
if 'acl' in resp_data:
resp_data['acl'] = parse_priv_to_db(resp_data['acl'], ['X'])
# generate function signature
header_func_name = '{0}.{1}({2})'.format(
resp_data['pronamespace'],
resp_data['proname'],
resp_data['proargtypenames']
)
SQL = render_template("/".join([self.sql_template_path,
'get_definition.sql']
), data=resp_data,

@@ -999,17 +985,7 @@ class FunctionView(PGChildNodeView, DataTypeReader):
return internal_server_error(errormsg=res)
# Add newline and tab before each argument to format
name_with_default_args = res['rows'][0]['name_with_default_args'].replace(', ', ',\r\t').replace('(', '(\r\t')
if hasattr(str, 'decode'):
if resp_data['prosrc']:
resp_data['prosrc'] = resp_data['prosrc'].decode(
'utf-8'
)
if resp_data['prosrc_c']:
resp_data['prosrc_c'] = resp_data['prosrc_c'].decode(
'utf-8'
)
name_with_default_args = res['rows'][0]['name_with_default_args'].replace(', ', ',\r\t').replace('(', '(\r\t')
# Generate sql for "SQL panel"
# func_def is function signature with default arguments

@@ -1020,13 +996,16 @@ class FunctionView(PGChildNodeView, DataTypeReader):
func_def=name_with_default_args,
query_for="sql_panel")
sql_header = """-- {0}: {1}
sql_header = u"""-- {0}: {1}{2}
-- DROP {0} {1};
-- DROP {0} {1}{2};
""".format(object_type.upper(), header_func_name)
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
""".format(object_type.upper(),
self.qtIdent(
self.conn,
resp_data['pronamespace'],
resp_data['proname']),
resp_data['proargtypenames'])
SQL = sql_header + func_def
SQL = re.sub('\n{2,}', '\n\n', SQL)
@@ -606,11 +606,9 @@ class PackageView(PGChildNodeView):
sql, name = self.getSQL(gid, sid, did, result, scid)
sql = sql.strip('\n').strip(' ')
sql_header = "-- Package: {}\n\n-- ".format(
sql_header = u"-- Package: {}\n\n-- ".format(
self.qtIdent(self.conn, self.schema, result['name'])
)
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),

@@ -542,11 +542,9 @@ It may have been removed by another user or moved to another schema.
if not status:
return internal_server_error(errormsg=res)
sql = "-- Package {}: {}".format(
sql = u"-- Package {}: {}".format(
'Function' if self.node_type == 'edbfunc' else 'Procedure',
name)
if hasattr(str, 'decode'):
sql = sql.decode('utf-8')
if body != '':
sql += "\n\n"
sql += body

@@ -317,11 +317,9 @@ class EdbVarView(PGChildNodeView, DataTypeReader):
return internal_server_error(errormsg=res)
var = res['rows'][0]
sql = "-- Package Variable: {}".format(var['name'])
sql += "\n\n"
sql += "{} {};".format(var['name'], var['datatype'])
if hasattr(str, 'decode'):
sql = sql.decode('utf-8')
sql = u"-- Package Variable: {}".format(var['name'])
sql += u"\n\n"
sql += u"{} {};".format(var['name'], var['datatype'])
return ajax_response(response=sql)
@@ -1732,18 +1732,15 @@ class TableView(PGChildNodeView, DataTypeReader, VacuumSettings):
except (ValueError, TypeError, KeyError):
data[k] = v
try:
SQL, name = self.get_sql(did, scid, tid, data)
SQL = re.sub('\n{2,}', '\n\n', SQL)
SQL = SQL.strip('\n')
if SQL == '':
SQL = "--modified SQL"
return make_json_response(
data=SQL,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
SQL, name = self.get_sql(did, scid, tid, data)
SQL = re.sub('\n{2,}', '\n\n', SQL)
SQL = SQL.strip('\n')
if SQL == '':
SQL = "--modified SQL"
return make_json_response(
data=SQL,
status=200
)
def get_index_constraint_sql(self, did, tid, data):
"""

@@ -2025,7 +2022,7 @@ class TableView(PGChildNodeView, DataTypeReader, VacuumSettings):
else:
return None
def get_exclusion_constraint_sql(self, tid, data):
def get_exclusion_constraint_sql(self, did, tid, data):
"""
Args:
tid: Table ID

@@ -2060,7 +2057,7 @@ class TableView(PGChildNodeView, DataTypeReader, VacuumSettings):
properties_sql = render_template("/".join(
[self.exclusion_constraint_template_path, 'properties.sql']),
tid=tid, cid=c['oid'])
did=did, tid=tid, cid=c['oid'])
status, res = self.conn.execute_dict(properties_sql)
if not status:
return internal_server_error(errormsg=res)

@@ -2097,7 +2094,7 @@ class TableView(PGChildNodeView, DataTypeReader, VacuumSettings):
if len(sql) > 0:
# Join all the sql(s) as single string
return '\n\n'.join(sql)
return u'\n\n'.join(sql)
else:
return None

@@ -2277,7 +2274,7 @@ class TableView(PGChildNodeView, DataTypeReader, VacuumSettings):
SQL += '\n' + check_constraint_sql
# Check if exclusion constraint(s) is/are added/changed/deleted
exclusion_constraint_sql = self.get_exclusion_constraint_sql(tid, data)
exclusion_constraint_sql = self.get_exclusion_constraint_sql(did, tid, data)
# If we have check constraint sql then ad it in main sql
if exclusion_constraint_sql is not None:
SQL += '\n' + exclusion_constraint_sql

@@ -2574,9 +2571,7 @@ class TableView(PGChildNodeView, DataTypeReader, VacuumSettings):
# Push as string
data['cols'] = ', '.join(cols)
sql_header = "\n-- Index: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header = u"\n-- Index: {0}\n\n-- ".format(data['name'])
sql_header += render_template("/".join([self.index_template_path,
'delete.sql']),

@@ -2646,9 +2641,7 @@ class TableView(PGChildNodeView, DataTypeReader, VacuumSettings):
data = trigger_definition(data)
sql_header = "\n-- Trigger: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header = u"\n-- Trigger: {0}\n\n-- ".format(data['name'])
sql_header += render_template("/".join([self.trigger_template_path,
'delete.sql']),
@@ -706,9 +706,7 @@ class CheckConstraintView(PGChildNodeView):
'create.sql']),
data=data)
sql_header = "-- Constraint: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header = u"-- Constraint: {0}\n\n-- ".format(data['name'])
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),

@@ -868,9 +868,7 @@ class ExclusionConstraintView(PGChildNodeView):
SQL = render_template(
"/".join([self.template_path, 'create.sql']), data=data)
sql_header = "-- Constraint: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header = u"-- Constraint: {0}\n\n-- ".format(data['name'])
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
@@ -918,66 +918,61 @@ class ForeignKeyConstraintView(PGChildNodeView):
Returns:
"""
try:
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, conn=self.conn, cid=fkid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = res['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, conn=self.conn, cid=fkid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path,
'get_constraint_cols.sql']),
tid=tid,
keys=zip(data['confkey'], data['conkey']),
confrelid=data['confrelid'])
data = res['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
status, res = self.conn.execute_dict(sql)
sql = render_template("/".join([self.template_path,
'get_constraint_cols.sql']),
tid=tid,
keys=zip(data['confkey'], data['conkey']),
confrelid=data['confrelid'])
if not status:
return internal_server_error(errormsg=res)
status, res = self.conn.execute_dict(sql)
columns = []
for row in res['rows']:
columns.append({"local_column": row['conattname'],
"references": data['confrelid'],
"referenced": row['confattname']})
if not status:
return internal_server_error(errormsg=res)
data['columns'] = columns
columns = []
for row in res['rows']:
columns.append({"local_column": row['conattname'],
"references": data['confrelid'],
"referenced": row['confattname']})
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=data['columns'][0]['references'])
status, res = self.conn.execute_2darray(SQL)
data['columns'] = columns
if not status:
return internal_server_error(errormsg=res)
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=data['columns'][0]['references'])
status, res = self.conn.execute_2darray(SQL)
data['remote_schema'] = res['rows'][0]['schema']
data['remote_table'] = res['rows'][0]['table']
if not status:
return internal_server_error(errormsg=res)
SQL = render_template(
"/".join([self.template_path, 'create.sql']), data=data)
data['remote_schema'] = res['rows'][0]['schema']
data['remote_table'] = res['rows'][0]['table']
sql_header = "-- Constraint: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
SQL = render_template(
"/".join([self.template_path, 'create.sql']), data=data)
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
data=data)
sql_header += "\n"
sql_header = u"-- Constraint: {0}\n\n-- ".format(data['name'])
SQL = sql_header + SQL
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
data=data)
sql_header += "\n"
return ajax_response(response=SQL)
SQL = sql_header + SQL
except Exception as e:
return internal_server_error(errormsg=str(e))
return ajax_response(response=SQL)
@check_precondition
def dependents(self, gid, sid, did, scid, tid, fkid=None):
@@ -873,57 +873,52 @@ class IndexConstraintView(PGChildNodeView):
Returns:
"""
try:
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
did=did,
tid=tid,
conn=self.conn,
cid=cid,
constraint_type=self.constraint_type)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = res['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
did=did,
tid=tid,
conn=self.conn,
cid=cid,
constraint_type=self.constraint_type)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
sql = render_template(
"/".join([self.template_path, 'get_constraint_cols.sql']),
cid=cid, colcnt=data['indnatts'])
data = res['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
status, res = self.conn.execute_dict(sql)
sql = render_template(
"/".join([self.template_path, 'get_constraint_cols.sql']),
cid=cid, colcnt=data['indnatts'])
if not status:
return internal_server_error(errormsg=res)
status, res = self.conn.execute_dict(sql)
columns = []
for row in res['rows']:
columns.append({"column": row['column'].strip('"')})
if not status:
return internal_server_error(errormsg=res)
data['columns'] = columns
columns = []
for row in res['rows']:
columns.append({"column": row['column'].strip('"')})
SQL = render_template(
"/".join([self.template_path, 'create.sql']),
data=data,
constraint_name=self.constraint_name)
data['columns'] = columns
sql_header = "-- Constraint: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
SQL = render_template(
"/".join([self.template_path, 'create.sql']),
data=data,
constraint_name=self.constraint_name)
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
data=data)
sql_header += "\n"
sql_header = u"-- Constraint: {0}\n\n-- ".format(data['name'])
SQL = sql_header + SQL
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
data=data)
sql_header += "\n"
return ajax_response(response=SQL)
SQL = sql_header + SQL
except Exception as e:
return internal_server_error(errormsg=str(e))
return ajax_response(response=SQL)
@check_precondition
def statistics(self, gid, sid, did, scid, tid, cid):
@@ -839,40 +839,35 @@ class IndexesView(PGChildNodeView):
tid: Table ID
idx: Index ID
"""
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
did=did, tid=tid, idx=idx,
datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
SQL = render_template("/".join([self.template_path,
'properties.sql']),
did=did, tid=tid, idx=idx,
datlastsysoid=self.datlastsysoid)
data = dict(res['rows'][0])
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
# Add column details for current index
data = self._column_details(idx, data)
data = dict(res['rows'][0])
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
SQL, name = self.get_sql(did, scid, tid, None, data)
# Add column details for current index
data = self._column_details(idx, data)
sql_header = "-- Index: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
SQL, name = self.get_sql(did, scid, tid, None, data)
sql_header += render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn)
sql_header = u"-- Index: {0}\n\n-- ".format(data['name'])
SQL = sql_header + '\n\n' + SQL
sql_header += render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn)
return ajax_response(response=SQL)
SQL = sql_header + '\n\n' + SQL
except Exception as e:
return internal_server_error(errormsg=str(e))
return ajax_response(response=SQL)
@check_precondition
def dependents(self, gid, sid, did, scid, tid, idx):

@@ -1,8 +1,8 @@
{# ============Create Rule============= #}
{% if display_comments %}
-- Rule: {{ data.name }} ON {{ conn|qtIdent(data.schema, data.name) }}
-- Rule: {{ conn|qtIdent(data.name) }} ON {{ conn|qtIdent(data.schema, data.name) }}
-- DROP Rule {{ data.name }} ON {{ conn|qtIdent(data.schema, data.name) }};
-- DROP Rule {{ conn|qtIdent(data.name) }} ON {{ conn|qtIdent(data.schema, data.name) }};
{% endif %}
{% if data.name and data.schema and data.view %}
@@ -526,7 +526,7 @@ class TriggerView(PGChildNodeView):
Returns:
Formated arguments for function
"""
formatted_args = ["'{0}'".format(arg) for arg in args]
formatted_args = ["{0}".format(arg) for arg in args]
return ', '.join(formatted_args)

@@ -889,56 +889,51 @@ class TriggerView(PGChildNodeView):
tid: Table ID
trid: Trigger ID
"""
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, trid=trid,
datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, trid=trid,
datlastsysoid=self.datlastsysoid)
data = dict(res['rows'][0])
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = self.get_trigger_function_schema(data)
data = dict(res['rows'][0])
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
if len(data['custom_tgargs']) > 1:
# We know that trigger has more than 1 argument, let's join them
data['tgargs'] = self._format_args(data['custom_tgargs'])
data = self.get_trigger_function_schema(data)
if len(data['tgattr']) > 1:
columns = ', '.join(data['tgattr'].split(' '))
data['columns'] = self._column_details(tid, columns)
if len(data['custom_tgargs']) > 1:
# We know that trigger has more than 1 argument, let's join them
data['tgargs'] = self._format_args(data['custom_tgargs'])
data = self._trigger_definition(data)
if len(data['tgattr']) > 1:
columns = ', '.join(data['tgattr'].split(' '))
data['columns'] = self._column_details(tid, columns)
SQL, name = self.get_sql(scid, tid, None, data)
data = self._trigger_definition(data)
sql_header = "-- Trigger: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
SQL, name = self.get_sql(scid, tid, None, data)
sql_header += render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn)
sql_header = u"-- Trigger: {0}\n\n-- ".format(data['name'])
SQL = sql_header + '\n\n' + SQL.strip('\n')
sql_header += render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn)
# If trigger is disbaled then add sql code for the same
if not data['is_enable_trigger']:
SQL += '\n\n'
SQL += render_template("/".join([self.template_path,
'enable_disable_trigger.sql']),
data=data, conn=self.conn)
SQL = sql_header + '\n\n' + SQL.strip('\n')
return ajax_response(response=SQL)
# If trigger is disbaled then add sql code for the same
if not data['is_enable_trigger']:
SQL += '\n\n'
SQL += render_template("/".join([self.template_path,
'enable_disable_trigger.sql']),
data=data, conn=self.conn)
except Exception as e:
return internal_server_error(errormsg=str(e))
return ajax_response(response=SQL)
@check_precondition
def enable_disable_trigger(self, gid, sid, did, scid, tid, trid):
@@ -1300,9 +1300,7 @@ class TypeView(PGChildNodeView, DataTypeReader):
SQL, name = self.get_sql(gid, sid, data, scid, tid=None)
# We are appending headers here for sql panel
sql_header = "-- Type: {0}\n\n-- ".format(data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
sql_header = u"-- Type: {0}\n\n-- ".format(data['name'])
sql_header += render_template("/".join([self.template_path,
'delete.sql']),

@@ -875,8 +875,8 @@ class ViewNode(PGChildNodeView, VacuumSettings):
for trigger in data['rows']:
SQL = render_template("/".join(
[self.trigger_temp_path, 'sql/#{0}#/properties.sql'.format(self.manager.version)]),
tid=trigger['oid'],
tgrelid=vid
tid=vid,
trid=trigger['oid']
)
status, res = self.conn.execute_dict(SQL)
@@ -894,6 +894,21 @@ class ViewNode(PGChildNodeView, VacuumSettings):
trigger['oid'], columns)
res_rows = trigger_definition(res_rows)
res_rows['schema'] = res_rows['nspname']
# It should be relname and not table, but in create.sql
# (which is referred from many places) we have used
# data.table and not data.relname so compatibility add new key as
# table in res_rows.
res_rows['table'] = res_rows['relname']
res_rows['tfunction'] = self.qtIdent(self.conn, res_rows['schema'], res_rows['tfunction'])
# Format arguments
if len(res_rows['custom_tgargs']) > 1:
formatted_args = ["{0}".format(arg) for arg in res_rows['custom_tgargs']]
res_rows['tgargs'] = ', '.join(formatted_args)
SQL = render_template("/".join(
[self.trigger_temp_path, 'sql/#{0}#/create.sql'.format(self.manager.version)]),
data=res_rows, display_comments=True)

@@ -534,16 +534,13 @@ class TablespaceView(PGChildNodeView):
data=old_data, conn=self.conn
)
sql_header = """
sql_header = u"""
-- Tablespace: {0}
-- DROP TABLESPACE {0};
""".format(old_data['name'])
if hasattr(str, 'decode'):
sql_header = sql_header.decode('utf-8')
SQL = sql_header + SQL
SQL = re.sub('\n{2,}', '\n\n', SQL)
return ajax_response(response=SQL.strip('\n'))
@@ -26,7 +26,7 @@ from flask import g, current_app, session
from flask_babel import gettext
from flask_security import current_user
from pgadmin.utils.crypto import decrypt
from psycopg2.extensions import adapt
from psycopg2.extensions import adapt, encodings
import config
from pgadmin.model import Server, User

@@ -37,6 +37,8 @@ from .cursor import DictCursor
if sys.version_info < (3,):
from StringIO import StringIO
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
else:
from io import StringIO
@@ -62,6 +64,50 @@ psycopg2.extensions.register_type(
'TYPECAST_TO_STRING', psycopg2.STRING)
)

def register_string_typecasters(connection):
if connection.encoding != 'UTF8':
# In python3 when database encoding is other than utf-8 and client
# encoding is set to UNICODE then we need to map data from database
# encoding to utf-8.
# This is required because when client encoding is set to UNICODE then
# psycopg assumes database encoding utf-8 and not the actual encoding.
# Not sure whether it's bug or feature in psycopg for python3.
if sys.version_info >= (3,):
def return_as_unicode(value, cursor):
if value is None:
return None
# Treat value as byte sequence of database encoding and then
# decode it as utf-8 to get correct unicode value.
return bytes(
value, encodings[cursor.connection.encoding]
).decode('utf-8')
unicode_type = psycopg2.extensions.new_type(
# "char", name, text, character, character varying
(19, 18, 25, 1042, 1043, 0),
'UNICODE', return_as_unicode)
else:
def return_as_unicode(value, cursor):
if value is None:
return None
# Decode it as utf-8 to get correct unicode value.
return value.decode('utf-8')
unicode_type = psycopg2.extensions.new_type(
# "char", name, text, character, character varying
(19, 18, 25, 1042, 1043, 0),
'UNICODE', return_as_unicode)
unicode_array_type = psycopg2.extensions.new_array_type(
# "char"[], name[], text[], character[], character varying[]
(1002, 1003, 1009, 1014, 1015, 0
), 'UNICODEARRAY', unicode_type)
psycopg2.extensions.register_type(unicode_type)
psycopg2.extensions.register_type(unicode_array_type)

class Connection(BaseConnection):
"""
class Connection(object)

@@ -336,6 +382,7 @@ Failed to connect to the database server(#{server_id}) for connection ({conn_id}
else:
self.conn.autocommit = True
register_string_typecasters(self.conn)
status = _execute(cur, """
SET DateStyle=ISO;
SET client_min_messages=notice;
@@ -538,11 +585,17 @@ WHERE
query: SQL query to run.
params: Extra parameters
"""
if sys.version_info < (3,):
if type(query) == unicode:
query = query.encode('utf-8')
else:
query = query.encode('utf-8')
cur.execute(query, params)
if self.async == 1:
self._wait(cur.connection)

def execute_on_server_as_csv(self, query, params=None, formatted_exception_msg=False, records=2000):
status, cur = self.__cursor(server_cursor=True)
self.row_count = 0

@@ -551,11 +604,14 @@ WHERE
return False, str(cur)
query_id = random.randint(1, 9999999)
if sys.version_info < (3,) and type(query) == unicode:
query = query.encode('utf-8')
current_app.logger.log(25,
u"Execute (with server cursor) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}".format(
server_id=self.manager.sid,
conn_id=self.conn_id,
query=query,
query=query.decode('utf-8') if sys.version_info < (3,) else query,
query_id=query_id
)
)
@@ -673,6 +729,13 @@ WHERE
params: extra parameters to the function
formatted_exception_msg: if True then function return the formatted exception message
"""
if sys.version_info < (3,):
if type(query) == unicode:
query = query.encode('utf-8')
else:
query = query.encode('utf-8')
self.__async_cursor = None
status, cur = self.__cursor()

@@ -685,7 +748,7 @@ WHERE
u"Execute (async) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}".format(
server_id=self.manager.sid,
conn_id=self.conn_id,
query=query,
query=query.decode('utf-8'),
query_id=query_id
)
)

@@ -703,7 +766,7 @@ Failed to execute query (execute_async) for the server #{server_id} - {conn_id}
""".format(
server_id=self.manager.sid,
conn_id=self.conn_id,
query=query,
query=query.decode('utf-8'),
errmsg=errmsg,
query_id=query_id
)
@@ -1275,44 +1338,53 @@ Failed to reset the connection to the server due to following error:
if not formatted_msg:
return errmsg
errmsg += '********** Error **********\n\n'
errmsg += u'********** Error **********\n\n'
if exception_obj.diag.severity is not None \
and exception_obj.diag.message_primary is not None:
errmsg += exception_obj.diag.severity + ": " + \
exception_obj.diag.message_primary
errmsg += u"{}: {}".format(
exception_obj.diag.severity,
exception_obj.diag.message_primary.decode('utf-8') if
hasattr(str, 'decode') else exception_obj.diag.message_primary)
elif exception_obj.diag.message_primary is not None:
errmsg += exception_obj.diag.message_primary
errmsg += exception_obj.diag.message_primary.decode('utf-8') if \
hasattr(str, 'decode') else exception_obj.diag.message_primary
if exception_obj.diag.sqlstate is not None:
if not errmsg[:-1].endswith('\n'):
errmsg += '\n'
errmsg += gettext('SQL state: ')
errmsg += exception_obj.diag.sqlstate
errmsg += exception_obj.diag.sqlstate.decode('utf-8') if \
hasattr(str, 'decode') else exception_obj.diag.sqlstate
if exception_obj.diag.message_detail is not None:
if not errmsg[:-1].endswith('\n'):
errmsg += '\n'
errmsg += gettext('Detail: ')
errmsg += exception_obj.diag.message_detail
errmsg += exception_obj.diag.message_detail.decode('utf-8') if \
hasattr(str, 'decode') else exception_obj.diag.message_detail
if exception_obj.diag.message_hint is not None:
if not errmsg[:-1].endswith('\n'):
errmsg += '\n'
errmsg += gettext('Hint: ')
errmsg += exception_obj.diag.message_hint
errmsg += exception_obj.diag.message_hint.decode('utf-8') if \
hasattr(str, 'decode') else exception_obj.diag.message_hint
if exception_obj.diag.statement_position is not None:
if not errmsg[:-1].endswith('\n'):
errmsg += '\n'
errmsg += gettext('Character: ')
errmsg += exception_obj.diag.statement_position
errmsg += exception_obj.diag.statement_position.decode('utf-8') if \
hasattr(str, 'decode') else exception_obj.diag.statement_position
if exception_obj.diag.context is not None:
if not errmsg[:-1].endswith('\n'):
errmsg += '\n'
errmsg += gettext('Context: ')
errmsg += exception_obj.diag.context
errmsg += exception_obj.diag.context.decode('utf-8') if \
hasattr(str, 'decode') else exception_obj.diag.context
return errmsg