mirror of
https://github.com/pgadmin-org/pgadmin4.git
synced 2025-02-25 18:55:31 -06:00
Added Schema Diff tool to compare two schemas and generate the difference script.
Currently supported objects are Table, View, Materialized View, Function and Procedure. Backend comparison of two schemas implemented by: Akshay Joshi Fixes #3452.
This commit is contained in:
committed by
Akshay Joshi
parent
8b99a33e6e
commit
45f2e35a99
@@ -28,6 +28,7 @@ from pgadmin.model import db, Server, ServerGroup, User
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.master_password import get_crypt_key
|
||||
from pgadmin.utils.exception import CryptKeyMissing
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from psycopg2 import Error as psycopg2_Error, OperationalError
|
||||
|
||||
|
||||
@@ -1627,4 +1628,5 @@ class ServerNode(PGChildNodeView):
|
||||
)
|
||||
|
||||
|
||||
SchemaDiffRegistry(blueprint.node_type, ServerNode)
|
||||
ServerNode.register_node_view(blueprint)
|
||||
|
||||
@@ -30,6 +30,8 @@ from pgadmin.utils.ajax import make_json_response, \
|
||||
make_response as ajax_response, internal_server_error, unauthorized
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.sqleditor.utils.query_history import QueryHistory
|
||||
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.model import Server
|
||||
|
||||
|
||||
@@ -1111,4 +1113,5 @@ class DatabaseView(PGChildNodeView):
|
||||
)
|
||||
|
||||
|
||||
SchemaDiffRegistry(blueprint.node_type, DatabaseView)
|
||||
DatabaseView.register_node_view(blueprint)
|
||||
|
||||
@@ -23,6 +23,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone, bad_request
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
|
||||
"""
|
||||
This module is responsible for generating two nodes
|
||||
@@ -1023,5 +1024,6 @@ It may have been removed by another user.
|
||||
return ajax_response(response=SQL.strip("\n"))
|
||||
|
||||
|
||||
SchemaDiffRegistry(schema_blueprint.node_type, SchemaView)
|
||||
SchemaView.register_node_view(schema_blueprint)
|
||||
CatalogView.register_node_view(catalog_blueprint)
|
||||
|
||||
@@ -25,6 +25,8 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.compile_template_name import compile_template_path
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -92,7 +94,7 @@ class CollationModule(SchemaChildModule):
|
||||
blueprint = CollationModule(__name__)
|
||||
|
||||
|
||||
class CollationView(PGChildNodeView):
|
||||
class CollationView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
This class is responsible for generating routes for Collation node
|
||||
|
||||
@@ -144,6 +146,10 @@ class CollationView(PGChildNodeView):
|
||||
* dependent(gid, sid, did, scid):
|
||||
- This function will generate dependent list to show it in dependent
|
||||
pane for the selected Collation node.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the collation nodes from two different
|
||||
schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -172,7 +178,8 @@ class CollationView(PGChildNodeView):
|
||||
'dependency': [{'get': 'dependencies'}],
|
||||
'dependent': [{'get': 'dependents'}],
|
||||
'get_collations': [{'get': 'get_collation'},
|
||||
{'get': 'get_collation'}]
|
||||
{'get': 'get_collation'}],
|
||||
'compare': [{'get': 'compare'}, {'get': 'compare'}]
|
||||
})
|
||||
|
||||
def check_precondition(f):
|
||||
@@ -318,23 +325,36 @@ class CollationView(PGChildNodeView):
|
||||
JSON of selected collation node
|
||||
"""
|
||||
|
||||
status, res = self._fetch_properties(scid, coid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, coid):
|
||||
"""
|
||||
This function fetch the properties for the specified object.
|
||||
|
||||
:param scid: Schema ID
|
||||
:param coid: Collation ID
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
scid=scid, coid=coid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Could not find the collation object in the database.")
|
||||
)
|
||||
return False, gone(gettext("Could not find the collation "
|
||||
"object in the database."))
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
return True, res['rows'][0]
|
||||
|
||||
@check_precondition
|
||||
def get_collation(self, gid, sid, did, scid, coid=None):
|
||||
@@ -748,5 +768,30 @@ class CollationView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the collations for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
status, data = self._fetch_properties(scid, row['oid'])
|
||||
if status:
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
CollationView.register_node_view(blueprint)
|
||||
|
||||
@@ -27,6 +27,8 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.compile_template_name import compile_template_path
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -79,7 +81,7 @@ class DomainModule(SchemaChildModule):
|
||||
blueprint = DomainModule(__name__)
|
||||
|
||||
|
||||
class DomainView(PGChildNodeView, DataTypeReader):
|
||||
class DomainView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
|
||||
"""
|
||||
class DomainView
|
||||
|
||||
@@ -138,6 +140,10 @@ class DomainView(PGChildNodeView, DataTypeReader):
|
||||
|
||||
* types(gid, sid, did, scid, fnid=None):
|
||||
- Returns Data Types.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the domain nodes from two different
|
||||
schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -169,7 +175,8 @@ class DomainView(PGChildNodeView, DataTypeReader):
|
||||
'get_collations': [
|
||||
{'get': 'get_collations'},
|
||||
{'get': 'get_collations'}
|
||||
]
|
||||
],
|
||||
'compare': [{'get': 'compare'}, {'get': 'compare'}]
|
||||
})
|
||||
|
||||
def validate_request(f):
|
||||
@@ -369,15 +376,31 @@ class DomainView(PGChildNodeView, DataTypeReader):
|
||||
scid: Schema Id
|
||||
doid: Domain Id
|
||||
"""
|
||||
status, res = self._fetch_properties(did, scid, doid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, did, scid, doid):
|
||||
"""
|
||||
This function is used to fecth the properties of specified object.
|
||||
:param did:
|
||||
:param scid:
|
||||
:param doid:
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
scid=scid, doid=doid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""
|
||||
return False, gone(gettext("""
|
||||
Could not find the domain in the database.
|
||||
It may have been removed by another user or moved to another schema.
|
||||
"""))
|
||||
@@ -393,7 +416,7 @@ It may have been removed by another user or moved to another schema.
|
||||
doid=doid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
data['constraints'] = res['rows']
|
||||
|
||||
@@ -406,10 +429,7 @@ It may have been removed by another user or moved to another schema.
|
||||
if doid <= self.manager.db_info[did]['datlastsysoid']:
|
||||
data['sysdomain'] = True
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
status=200
|
||||
)
|
||||
return True, data
|
||||
|
||||
def _parse_type(self, basetype):
|
||||
"""
|
||||
@@ -664,7 +684,7 @@ AND relkind != 'c'))"""
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, doid=None):
|
||||
def sql(self, gid, sid, did, scid, doid=None, return_ajax_response=True):
|
||||
"""
|
||||
Returns the SQL for the Domain object.
|
||||
|
||||
@@ -674,6 +694,7 @@ AND relkind != 'c'))"""
|
||||
did: Database Id
|
||||
scid: Schema Id
|
||||
doid: Domain Id
|
||||
return_ajax_response:
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
@@ -716,6 +737,9 @@ AND relkind != 'c'))"""
|
||||
""".format(self.qtIdent(self.conn, data['basensp'], data['name']))
|
||||
SQL = sql_header + SQL
|
||||
|
||||
if not return_ajax_response:
|
||||
return SQL.strip('\n')
|
||||
|
||||
return ajax_response(response=SQL.strip('\n'))
|
||||
|
||||
@check_precondition
|
||||
@@ -846,5 +870,40 @@ AND relkind != 'c'))"""
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the domains for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'node.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
status, data = self._fetch_properties(did, scid, row['oid'])
|
||||
|
||||
if status:
|
||||
if 'constraints' in data and len(data['constraints']) > 0:
|
||||
for item in data['constraints']:
|
||||
# Remove keys that should not be the part
|
||||
# of comparision.
|
||||
if 'conoid' in item:
|
||||
item.pop('conoid')
|
||||
if 'nspname' in item:
|
||||
item.pop('nspname')
|
||||
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
DomainView.register_node_view(blueprint)
|
||||
|
||||
@@ -8,7 +8,7 @@ JOIN
|
||||
JOIN
|
||||
pg_namespace nl ON nl.oid=typnamespace
|
||||
LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=t.oid AND des.classoid='pg_constraint'::regclass)
|
||||
pg_description des ON (des.objoid=c.oid AND des.classoid='pg_constraint'::regclass)
|
||||
WHERE
|
||||
contype = 'c' AND contypid = {{doid}}::oid
|
||||
ORDER BY
|
||||
|
||||
@@ -8,7 +8,7 @@ JOIN
|
||||
JOIN
|
||||
pg_namespace nl ON nl.oid=typnamespace
|
||||
LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=t.oid AND des.classoid='pg_constraint'::regclass)
|
||||
pg_description des ON (des.objoid=c.oid AND des.classoid='pg_constraint'::regclass)
|
||||
WHERE
|
||||
contype = 'c'
|
||||
AND contypid = {{doid}}::oid
|
||||
|
||||
@@ -32,6 +32,8 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.compile_template_name import compile_template_path
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -93,7 +95,8 @@ class ForeignTableModule(SchemaChildModule):
|
||||
blueprint = ForeignTableModule(__name__)
|
||||
|
||||
|
||||
class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
class ForeignTableView(PGChildNodeView, DataTypeReader,
|
||||
SchemaDiffObjectCompare):
|
||||
"""
|
||||
class ForeignTableView(PGChildNodeView)
|
||||
|
||||
@@ -174,6 +177,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
* delete_sql(gid, sid, did, scid, foid):
|
||||
- Returns sql for Script
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the foreign table nodes from two different
|
||||
schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -213,7 +219,8 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
'select_sql': [{'get': 'select_sql'}],
|
||||
'insert_sql': [{'get': 'insert_sql'}],
|
||||
'update_sql': [{'get': 'update_sql'}],
|
||||
'delete_sql': [{'get': 'delete_sql'}]
|
||||
'delete_sql': [{'get': 'delete_sql'}],
|
||||
'compare': [{'get': 'compare'}, {'get': 'compare'}]
|
||||
})
|
||||
|
||||
def validate_request(f):
|
||||
@@ -447,11 +454,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
scid: Schema Id
|
||||
foid: Foreign Table Id
|
||||
"""
|
||||
data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if data is False:
|
||||
return gone(
|
||||
gettext("Could not find the foreign table on the server.")
|
||||
)
|
||||
status, data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if not status:
|
||||
return data
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
@@ -814,11 +819,10 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
scid: Schema Id
|
||||
foid: Foreign Table Id
|
||||
"""
|
||||
data = self._fetch_properties(gid, sid, did, scid, foid, inherits=True)
|
||||
if data is False:
|
||||
return gone(
|
||||
gettext("Could not find the foreign table on the server.")
|
||||
)
|
||||
status, data = self._fetch_properties(gid, sid, did, scid, foid,
|
||||
inherits=True)
|
||||
if not status:
|
||||
return data
|
||||
|
||||
col_data = []
|
||||
for c in data['columns']:
|
||||
@@ -891,12 +895,10 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
foid: Foreign Table Id
|
||||
"""
|
||||
if foid is not None:
|
||||
old_data = self._fetch_properties(gid, sid, did, scid, foid,
|
||||
inherits=True)
|
||||
if old_data is False:
|
||||
return gone(
|
||||
gettext("Could not find the foreign table on the server.")
|
||||
)
|
||||
status, old_data = self._fetch_properties(gid, sid, did, scid,
|
||||
foid, inherits=True)
|
||||
if not status:
|
||||
return old_data
|
||||
|
||||
# Prepare dict of columns with key = column's attnum
|
||||
# Will use this in the update template when any column is
|
||||
@@ -1051,10 +1053,10 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
scid=scid, foid=foid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return False
|
||||
return False, False
|
||||
|
||||
data = res['rows'][0]
|
||||
|
||||
@@ -1064,7 +1066,7 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
foid=foid)
|
||||
status, aclres = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=aclres)
|
||||
return False, internal_server_error(errormsg=aclres)
|
||||
|
||||
# Get Formatted Privileges
|
||||
data.update(self._format_proacl_from_db(aclres['rows']))
|
||||
@@ -1082,7 +1084,7 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
'get_constraints.sql']), foid=foid)
|
||||
status, cons = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=cons)
|
||||
return False, internal_server_error(errormsg=cons)
|
||||
|
||||
if cons and 'rows' in cons:
|
||||
data['constraints'] = cons['rows']
|
||||
@@ -1091,7 +1093,7 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
'get_columns.sql']), foid=foid)
|
||||
status, cols = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=cols)
|
||||
return False, internal_server_error(errormsg=cols)
|
||||
|
||||
# The Length and the precision of the Datatype should be separated.
|
||||
# The Format we getting from database is: numeric(1,1)
|
||||
@@ -1128,12 +1130,12 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if 'inherits' in res['rows'][0]:
|
||||
data['inherits'] = res['rows'][0]['inherits']
|
||||
|
||||
return data
|
||||
return True, data
|
||||
|
||||
@staticmethod
|
||||
def convert_precision_to_int(typlen):
|
||||
@@ -1222,11 +1224,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
Returns:
|
||||
SELECT Script sql for the object
|
||||
"""
|
||||
data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if data is False:
|
||||
return gone(
|
||||
gettext("Could not find the foreign table on the server.")
|
||||
)
|
||||
status, data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if not status:
|
||||
return data
|
||||
|
||||
columns = []
|
||||
for c in data['columns']:
|
||||
@@ -1259,11 +1259,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
Returns:
|
||||
INSERT Script sql for the object
|
||||
"""
|
||||
data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if data is False:
|
||||
return gone(
|
||||
gettext("Could not find the foreign table on the server.")
|
||||
)
|
||||
status, data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if not status:
|
||||
return data
|
||||
|
||||
columns = []
|
||||
values = []
|
||||
@@ -1301,11 +1299,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
Returns:
|
||||
UPDATE Script sql for the object
|
||||
"""
|
||||
data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if data is False:
|
||||
return gone(
|
||||
gettext("Could not find the foreign table on the server.")
|
||||
)
|
||||
status, data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if not status:
|
||||
return data
|
||||
|
||||
columns = []
|
||||
|
||||
@@ -1346,11 +1342,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
Returns:
|
||||
DELETE Script sql for the object
|
||||
"""
|
||||
data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if data is False:
|
||||
return gone(
|
||||
gettext("Could not find the foreign table on the server.")
|
||||
)
|
||||
status, data = self._fetch_properties(gid, sid, did, scid, foid)
|
||||
if not status:
|
||||
return data
|
||||
|
||||
sql = u"DELETE FROM {0}\n\tWHERE <condition>;".format(
|
||||
self.qtIdent(self.conn, data['basensp'], data['name'])
|
||||
@@ -1358,5 +1352,37 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
|
||||
return ajax_response(response=sql)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the foreign tables for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'node.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
status, data = self._fetch_properties(0, sid, did, scid,
|
||||
row['oid'])
|
||||
if status:
|
||||
if 'constraints' in data and data['constraints'] is not None \
|
||||
and len(data['constraints']) > 0:
|
||||
for item in data['constraints']:
|
||||
if 'conoid' in item:
|
||||
item.pop('conoid')
|
||||
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
ForeignTableView.register_node_view(blueprint)
|
||||
|
||||
@@ -24,6 +24,8 @@ from pgadmin.utils import IS_PY2
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -93,7 +95,7 @@ class FtsConfigurationModule(SchemaChildModule):
|
||||
blueprint = FtsConfigurationModule(__name__)
|
||||
|
||||
|
||||
class FtsConfigurationView(PGChildNodeView):
|
||||
class FtsConfigurationView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
class FtsConfigurationView(PGChildNodeView)
|
||||
|
||||
@@ -167,6 +169,9 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
* dependencies(self, gid, sid, did, scid, cfgid):
|
||||
- This function get the dependencies and return ajax response for node.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the fts configuration nodes from two
|
||||
different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -202,6 +207,7 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
{'get': 'copyConfig'}],
|
||||
'tokens': [{'get': 'tokens'}, {'get': 'tokens'}],
|
||||
'dictionaries': [{}, {'get': 'dictionaries'}],
|
||||
'compare': [{'get': 'compare'}, {'get': 'compare'}]
|
||||
})
|
||||
|
||||
def _init_(self, **kwargs):
|
||||
@@ -343,7 +349,22 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
scid: Schema Id
|
||||
cfgid: fts Configuration id
|
||||
"""
|
||||
status, res = self._fetch_properties(scid, cfgid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, cfgid):
|
||||
"""
|
||||
This function is used to fetch property of specified object.
|
||||
:param scid:
|
||||
:param cfgid:
|
||||
:return:
|
||||
"""
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
scid=scid,
|
||||
@@ -352,10 +373,10 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
return False, gone(
|
||||
_(
|
||||
"Could not find the FTS Configuration node in the "
|
||||
"database node.")
|
||||
@@ -370,14 +391,11 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
status, rset = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
return False, internal_server_error(errormsg=rset)
|
||||
|
||||
res['rows'][0]['tokens'] = rset['rows']
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
return True, res['rows'][0]
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did, scid):
|
||||
@@ -927,5 +945,30 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the fts configurations for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), scid=scid)
|
||||
status, fts_cfg = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in fts_cfg['rows']:
|
||||
status, data = self._fetch_properties(scid, row['oid'])
|
||||
if status:
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
FtsConfigurationView.register_node_view(blueprint)
|
||||
|
||||
@@ -24,6 +24,8 @@ from pgadmin.utils import IS_PY2
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -91,7 +93,7 @@ class FtsDictionaryModule(SchemaChildModule):
|
||||
blueprint = FtsDictionaryModule(__name__)
|
||||
|
||||
|
||||
class FtsDictionaryView(PGChildNodeView):
|
||||
class FtsDictionaryView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
class FtsDictionaryView(PGChildNodeView)
|
||||
|
||||
@@ -159,6 +161,9 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
* dependencies(self, gid, sid, did, scid, dcid):
|
||||
- This function get the dependencies and return ajax response for node.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the fts dictionaries nodes from two
|
||||
different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -189,7 +194,7 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
'dependency': [{'get': 'dependencies'}],
|
||||
'dependent': [{'get': 'dependents'}],
|
||||
'fetch_templates': [{'get': 'fetch_templates'},
|
||||
{'get': 'fetch_templates'}],
|
||||
{'get': 'fetch_templates'}]
|
||||
})
|
||||
|
||||
def _init_(self, **kwargs):
|
||||
@@ -353,7 +358,23 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
scid: Schema Id
|
||||
dcid: fts dictionary id
|
||||
"""
|
||||
status, res = self._fetch_properties(scid, dcid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, dcid):
|
||||
"""
|
||||
This function is used to fetch the properties of specified object.
|
||||
|
||||
:param scid:
|
||||
:param dcid:
|
||||
:return:
|
||||
"""
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
scid=scid,
|
||||
@@ -362,10 +383,10 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_(
|
||||
return False, gone(_(
|
||||
"Could not find the FTS Dictionary node in the database node."
|
||||
))
|
||||
|
||||
@@ -382,10 +403,7 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
res['rows'][0]['options']
|
||||
)
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
return True, res['rows'][0]
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did, scid):
|
||||
@@ -854,5 +872,30 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the fts dictionaries for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
status, data = self._fetch_properties(scid, row['oid'])
|
||||
if status:
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
FtsDictionaryView.register_node_view(blueprint)
|
||||
|
||||
@@ -24,6 +24,8 @@ from pgadmin.utils import IS_PY2
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -84,7 +86,7 @@ class FtsParserModule(SchemaChildModule):
|
||||
blueprint = FtsParserModule(__name__)
|
||||
|
||||
|
||||
class FtsParserView(PGChildNodeView):
|
||||
class FtsParserView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
class FtsParserView(PGChildNodeView)
|
||||
|
||||
@@ -161,6 +163,9 @@ class FtsParserView(PGChildNodeView):
|
||||
- This function get the dependencies and return ajax response for
|
||||
FTS Parser node.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the fts parser nodes from two
|
||||
different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -198,7 +203,7 @@ class FtsParserView(PGChildNodeView):
|
||||
'lextype_functions': [{'get': 'lextype_functions'},
|
||||
{'get': 'lextype_functions'}],
|
||||
'headline_functions': [{'get': 'headline_functions'},
|
||||
{'get': 'headline_functions'}],
|
||||
{'get': 'headline_functions'}]
|
||||
})
|
||||
|
||||
def _init_(self, **kwargs):
|
||||
@@ -303,6 +308,32 @@ class FtsParserView(PGChildNodeView):
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, pid):
|
||||
"""
|
||||
|
||||
:param gid:
|
||||
:param sid:
|
||||
:param did:
|
||||
:param scid:
|
||||
:param pid:
|
||||
:return:
|
||||
"""
|
||||
status, res = self._fetch_properties(scid, pid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, pid):
|
||||
"""
|
||||
This function is used to fetch the properties of specified object.
|
||||
|
||||
:param scid:
|
||||
:param pid:
|
||||
:return:
|
||||
"""
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
scid=scid,
|
||||
@@ -311,16 +342,13 @@ class FtsParserView(PGChildNodeView):
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
return False, gone(
|
||||
_("Could not find the FTS Parser node in the database node."))
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
return True, res['rows'][0]
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did, scid):
|
||||
@@ -862,5 +890,30 @@ class FtsParserView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the fts parsers for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
status, data = self._fetch_properties(scid, row['oid'])
|
||||
if status:
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
FtsParserView.register_node_view(blueprint)
|
||||
|
||||
@@ -24,6 +24,8 @@ from pgadmin.utils import IS_PY2
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -90,7 +92,7 @@ class FtsTemplateModule(SchemaChildModule):
|
||||
blueprint = FtsTemplateModule(__name__)
|
||||
|
||||
|
||||
class FtsTemplateView(PGChildNodeView):
|
||||
class FtsTemplateView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
class FtsTemplateView(PGChildNodeView)
|
||||
|
||||
@@ -154,6 +156,9 @@ class FtsTemplateView(PGChildNodeView):
|
||||
- This function get the dependencies and return ajax response for the
|
||||
FTS Template node.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the fts template nodes from two
|
||||
different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -184,7 +189,7 @@ class FtsTemplateView(PGChildNodeView):
|
||||
'dependency': [{'get': 'dependencies'}],
|
||||
'dependent': [{'get': 'dependents'}],
|
||||
'get_lexize': [{'get': 'get_lexize'}, {'get': 'get_lexize'}],
|
||||
'get_init': [{'get': 'get_init'}, {'get': 'get_init'}],
|
||||
'get_init': [{'get': 'get_init'}, {'get': 'get_init'}]
|
||||
})
|
||||
|
||||
def _init_(self, **kwargs):
|
||||
@@ -281,25 +286,47 @@ class FtsTemplateView(PGChildNodeView):
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, tid):
|
||||
"""
|
||||
|
||||
:param gid:
|
||||
:param sid:
|
||||
:param did:
|
||||
:param scid:
|
||||
:param tid:
|
||||
:return:
|
||||
"""
|
||||
status, res = self._fetch_properties(scid, tid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, tid):
|
||||
"""
|
||||
This function is used to fetch the properties of specified object.
|
||||
|
||||
:param scid:
|
||||
:param pid:
|
||||
:return:
|
||||
"""
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
scid=scid,
|
||||
tid=tid
|
||||
)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
return False, gone(
|
||||
gettext("Could not find the requested FTS template.")
|
||||
)
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
return True, res['rows'][0]
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did, scid):
|
||||
@@ -734,5 +761,30 @@ class FtsTemplateView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the fts templates for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
status, data = self._fetch_properties(scid, row['oid'])
|
||||
if status:
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
FtsTemplateView.register_node_view(blueprint)
|
||||
|
||||
@@ -21,7 +21,7 @@ from flask import render_template, make_response, request, jsonify, \
|
||||
current_app
|
||||
from flask_babelex import gettext
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.utils import \
|
||||
SchemaChildModule, DataTypeReader
|
||||
SchemaChildModule, DataTypeReader, get_schema
|
||||
from pgadmin.browser.server_groups.servers.databases.utils import \
|
||||
parse_sec_labels_from_db, parse_variables_from_db
|
||||
from pgadmin.browser.server_groups.servers.utils import parse_priv_from_db, \
|
||||
@@ -30,8 +30,10 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.model import SchemaDiffModel
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
|
||||
class FunctionModule(SchemaChildModule):
|
||||
@@ -115,7 +117,7 @@ class FunctionModule(SchemaChildModule):
|
||||
blueprint = FunctionModule(__name__)
|
||||
|
||||
|
||||
class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
class FunctionView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
|
||||
"""
|
||||
class FunctionView(PGChildNodeView)
|
||||
|
||||
@@ -177,6 +179,10 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
|
||||
* exec_sql(gid, sid, did, scid, fnid):
|
||||
- Returns sql for Script
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the function nodes from two
|
||||
different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -213,6 +219,9 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
{'get': 'get_support_functions'}]
|
||||
})
|
||||
|
||||
keys_to_ignore = ['oid', 'proowner', 'typnsp', 'xmin', 'prokind',
|
||||
'proisagg', 'pronamespace', 'proargdefaults']
|
||||
|
||||
@property
|
||||
def required_args(self):
|
||||
"""
|
||||
@@ -790,7 +799,7 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, fnid=None):
|
||||
def delete(self, gid, sid, did, scid, fnid=None, only_sql=False):
|
||||
"""
|
||||
Drop the Function.
|
||||
|
||||
@@ -841,6 +850,8 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
func_args=res['rows'][0]['func_args'],
|
||||
nspname=res['rows'][0]['nspname'],
|
||||
cascade=cascade)
|
||||
if only_sql:
|
||||
return SQL
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@@ -915,7 +926,8 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, fnid=None):
|
||||
def sql(self, gid, sid, did, scid, fnid=None, diff_schema=None,
|
||||
json_resp=True):
|
||||
"""
|
||||
Returns the SQL for the Function object.
|
||||
|
||||
@@ -989,6 +1001,8 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if diff_schema:
|
||||
res['rows'][0]['nspname'] = diff_schema
|
||||
name_with_default_args = self.qtIdent(
|
||||
self.conn,
|
||||
res['rows'][0]['nspname'],
|
||||
@@ -1040,6 +1054,10 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if diff_schema:
|
||||
res['rows'][0]['nspname'] = diff_schema
|
||||
resp_data['pronamespace'] = diff_schema
|
||||
|
||||
name_with_default_args = self.qtIdent(
|
||||
self.conn,
|
||||
res['rows'][0]['nspname'],
|
||||
@@ -1071,6 +1089,9 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
resp_data['proname']),
|
||||
resp_data['proargtypenames'].lstrip('(').rstrip(')'))
|
||||
|
||||
if not json_resp:
|
||||
return re.sub('\n{2,}', '\n\n', func_def)
|
||||
|
||||
SQL = sql_header + func_def
|
||||
SQL = re.sub('\n{2,}', '\n\n', SQL)
|
||||
|
||||
@@ -1597,7 +1618,66 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
status=200
|
||||
)
|
||||
|
||||
def get_sql_from_diff(self, gid, sid, did, scid, oid, data=None,
|
||||
diff_schema=None, drop_sql=False):
|
||||
sql = ''
|
||||
if data:
|
||||
if diff_schema:
|
||||
data['schema'] = diff_schema
|
||||
status, sql = self._get_sql(gid, sid, did, scid, data, oid)
|
||||
else:
|
||||
if drop_sql:
|
||||
sql = self.delete(gid=gid, sid=sid, did=did,
|
||||
scid=scid, fnid=oid, only_sql=True)
|
||||
elif diff_schema:
|
||||
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, fnid=oid,
|
||||
diff_schema=diff_schema, json_resp=False)
|
||||
else:
|
||||
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, fnid=oid,
|
||||
json_resp=False)
|
||||
return sql
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid, oid=None):
|
||||
"""
|
||||
This function will fetch the list of all the functions for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
server_type = self.manager.server_type
|
||||
server_version = self.manager.sversion
|
||||
|
||||
if server_type == 'pg' and self.blueprint.min_ver is not None and \
|
||||
server_version < self.blueprint.min_ver:
|
||||
return res
|
||||
if server_type == 'ppas' and self.blueprint.min_ppasver is not None \
|
||||
and server_version < self.blueprint.min_ppasver:
|
||||
return res
|
||||
|
||||
if not oid:
|
||||
SQL = render_template("/".join([self.sql_template_path,
|
||||
'node.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
data = self._fetch_properties(0, sid, did, scid, row['oid'])
|
||||
if isinstance(data, dict):
|
||||
res[row['name']] = data
|
||||
else:
|
||||
data = self._fetch_properties(0, sid, did, scid, oid)
|
||||
res = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
SchemaDiffRegistry(blueprint.node_type, FunctionView)
|
||||
FunctionView.register_node_view(blueprint)
|
||||
|
||||
|
||||
@@ -1698,6 +1778,7 @@ class ProcedureView(FunctionView):
|
||||
'prosrc']
|
||||
|
||||
|
||||
SchemaDiffRegistry(procedure_blueprint.node_type, ProcedureView)
|
||||
ProcedureView.register_node_view(procedure_blueprint)
|
||||
|
||||
|
||||
@@ -1796,4 +1877,5 @@ class TriggerFunctionView(FunctionView):
|
||||
'prosrc']
|
||||
|
||||
|
||||
SchemaDiffRegistry(trigger_function_blueprint.node_type, TriggerFunctionView)
|
||||
TriggerFunctionView.register_node_view(trigger_function_blueprint)
|
||||
|
||||
@@ -27,6 +27,8 @@ from pgadmin.utils.ajax import make_json_response, \
|
||||
make_response as ajax_response, internal_server_error, \
|
||||
precondition_required, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -83,7 +85,7 @@ class PackageModule(SchemaChildModule):
|
||||
blueprint = PackageModule(__name__)
|
||||
|
||||
|
||||
class PackageView(PGChildNodeView):
|
||||
class PackageView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
node_type = blueprint.node_type
|
||||
|
||||
parent_ids = [
|
||||
@@ -111,6 +113,8 @@ class PackageView(PGChildNodeView):
|
||||
'dependent': [{'get': 'dependents'}]
|
||||
})
|
||||
|
||||
keys_to_ignore = ['oid', 'schema', 'xmin']
|
||||
|
||||
def check_precondition(action=None):
|
||||
"""
|
||||
This function will behave as a decorator which will checks
|
||||
@@ -297,16 +301,32 @@ class PackageView(PGChildNodeView):
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
status, res = self._fetch_properties(scid, pkgid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, pkgid):
|
||||
"""
|
||||
This function is used to fetch the properties of specified object.
|
||||
:param scid:
|
||||
:param pkgid:
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
scid=scid, pkgid=pkgid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
return False, gone(
|
||||
errormsg=_("Could not find the package in the database.")
|
||||
)
|
||||
|
||||
@@ -321,16 +341,13 @@ class PackageView(PGChildNodeView):
|
||||
status, rset1 = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset1)
|
||||
return False, internal_server_error(errormsg=rset1)
|
||||
|
||||
for row in rset1['rows']:
|
||||
priv = parse_priv_from_db(row)
|
||||
res['rows'][0].setdefault(row['deftype'], []).append(priv)
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
return True, res['rows'][0]
|
||||
|
||||
@check_precondition(action="create")
|
||||
def create(self, gid, sid, did, scid):
|
||||
@@ -396,7 +413,7 @@ class PackageView(PGChildNodeView):
|
||||
)
|
||||
|
||||
@check_precondition(action='delete')
|
||||
def delete(self, gid, sid, did, scid, pkgid=None):
|
||||
def delete(self, gid, sid, did, scid, pkgid=None, only_sql=False):
|
||||
"""
|
||||
This function will drop the object
|
||||
|
||||
@@ -453,6 +470,9 @@ class PackageView(PGChildNodeView):
|
||||
data=res['rows'][0],
|
||||
cascade=cascade)
|
||||
|
||||
if only_sql:
|
||||
return SQL
|
||||
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@@ -552,7 +572,8 @@ class PackageView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
def getSQL(self, gid, sid, did, data, scid, pkgid=None, sqltab=False):
|
||||
def getSQL(self, gid, sid, did, data, scid, pkgid=None, sqltab=False,
|
||||
diff_schema=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
|
||||
@@ -621,6 +642,9 @@ class PackageView(PGChildNodeView):
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
if diff_schema:
|
||||
data['schema'] = diff_schema
|
||||
|
||||
SQL = render_template("/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=self.conn)
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
@@ -635,7 +659,8 @@ class PackageView(PGChildNodeView):
|
||||
return SQL, data['name']
|
||||
|
||||
@check_precondition(action="sql")
|
||||
def sql(self, gid, sid, did, scid, pkgid):
|
||||
def sql(self, gid, sid, did, scid, pkgid, diff_schema=None,
|
||||
json_resp=True):
|
||||
"""
|
||||
This function will generate sql for sql panel
|
||||
|
||||
@@ -645,6 +670,8 @@ class PackageView(PGChildNodeView):
|
||||
did: Database ID
|
||||
scid: Schema ID
|
||||
pkgid: Package ID
|
||||
diff_schema: Schema diff target schema name
|
||||
json_resp: json response or plain text response
|
||||
"""
|
||||
try:
|
||||
SQL = render_template(
|
||||
@@ -676,13 +703,18 @@ class PackageView(PGChildNodeView):
|
||||
res['rows'][0].setdefault(row['deftype'], []).append(priv)
|
||||
|
||||
result = res['rows'][0]
|
||||
sql, name = self.getSQL(gid, sid, did, result, scid, pkgid, True)
|
||||
sql, name = self.getSQL(gid, sid, did, result, scid, pkgid, True,
|
||||
diff_schema)
|
||||
# Most probably this is due to error
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
|
||||
# Return sql for schema diff
|
||||
if not json_resp:
|
||||
return sql
|
||||
|
||||
sql_header = u"-- Package: {}\n\n-- ".format(
|
||||
self.qtIdent(self.conn, self.schema, result['name'])
|
||||
)
|
||||
@@ -756,5 +788,54 @@ class PackageView(PGChildNodeView):
|
||||
|
||||
return sql[start:end].strip("\n")
|
||||
|
||||
@check_precondition(action="fetch_objects_to_compare")
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the packages for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
if self.manager.server_type != 'ppas':
|
||||
return res
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
status, data = self._fetch_properties(scid, row['oid'])
|
||||
if status:
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
def get_sql_from_diff(self, gid, sid, did, scid, oid, data=None,
|
||||
diff_schema=None, drop_sql=False):
|
||||
sql = ''
|
||||
if data:
|
||||
if diff_schema:
|
||||
data['schema'] = diff_schema
|
||||
status, sql = self.getSQL(gid, sid, did, data, scid, oid)
|
||||
else:
|
||||
if drop_sql:
|
||||
sql = self.delete(gid=gid, sid=sid, did=did,
|
||||
scid=scid, pkgid=oid, only_sql=True)
|
||||
|
||||
elif diff_schema:
|
||||
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, pkgid=oid,
|
||||
diff_schema=diff_schema, json_resp=False)
|
||||
else:
|
||||
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, pkgid=oid,
|
||||
json_resp=False)
|
||||
return sql
|
||||
|
||||
|
||||
SchemaDiffRegistry(blueprint.node_type, PackageView)
|
||||
PackageView.register_node_view(blueprint)
|
||||
|
||||
@@ -161,6 +161,8 @@ class EdbFuncView(PGChildNodeView, DataTypeReader):
|
||||
* dependencies(gid, sid, did, scid, pkgid, edbfnid):
|
||||
- Returns the dependencies for the Functions object.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the nodes from two different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -184,7 +186,8 @@ class EdbFuncView(PGChildNodeView, DataTypeReader):
|
||||
'nodes': [{'get': 'nodes'}, {'get': 'nodes'}],
|
||||
'sql': [{'get': 'sql'}],
|
||||
'dependency': [{'get': 'dependencies'}],
|
||||
'dependent': [{'get': 'dependents'}]
|
||||
'dependent': [{'get': 'dependents'}],
|
||||
'compare': [{'get': 'compare'}, {'get': 'compare'}]
|
||||
})
|
||||
|
||||
def check_precondition(f):
|
||||
|
||||
@@ -137,6 +137,8 @@ class EdbVarView(PGChildNodeView, DataTypeReader):
|
||||
* sql(gid, sid, did, scid, pkgid, varid):
|
||||
- Returns the SQL for the Functions object.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the nodes from two different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -158,7 +160,8 @@ class EdbVarView(PGChildNodeView, DataTypeReader):
|
||||
{'get': 'list'}
|
||||
],
|
||||
'nodes': [{'get': 'nodes'}, {'get': 'nodes'}],
|
||||
'sql': [{'get': 'sql'}]
|
||||
'sql': [{'get': 'sql'}],
|
||||
'compare': [{'get': 'compare'}, {'get': 'compare'}]
|
||||
})
|
||||
|
||||
def check_precondition(f):
|
||||
|
||||
@@ -24,6 +24,9 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
unicode = str
|
||||
@@ -88,7 +91,7 @@ class SequenceModule(SchemaChildModule):
|
||||
blueprint = SequenceModule(__name__)
|
||||
|
||||
|
||||
class SequenceView(PGChildNodeView):
|
||||
class SequenceView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
node_type = blueprint.node_type
|
||||
|
||||
parent_ids = [
|
||||
@@ -273,6 +276,23 @@ class SequenceView(PGChildNodeView):
|
||||
Returns:
|
||||
|
||||
"""
|
||||
status, res = self._fetch_properties(scid, seid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, seid):
|
||||
"""
|
||||
This function is used to fetch the properties of the specified object.
|
||||
:param scid:
|
||||
:param seid:
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
scid=scid, seid=seid
|
||||
@@ -280,10 +300,11 @@ class SequenceView(PGChildNodeView):
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("Could not find the sequence in the database."))
|
||||
return False, gone(
|
||||
_("Could not find the sequence in the database."))
|
||||
|
||||
for row in res['rows']:
|
||||
SQL = render_template(
|
||||
@@ -292,7 +313,7 @@ class SequenceView(PGChildNodeView):
|
||||
)
|
||||
status, rset1 = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset1)
|
||||
return False, internal_server_error(errormsg=rset1)
|
||||
|
||||
row['current_value'] = rset1['rows'][0]['last_value']
|
||||
row['minimum'] = rset1['rows'][0]['min_value']
|
||||
@@ -319,7 +340,7 @@ class SequenceView(PGChildNodeView):
|
||||
)
|
||||
status, dataclres = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
for row in dataclres['rows']:
|
||||
priv = parse_priv_from_db(row)
|
||||
@@ -328,10 +349,7 @@ class SequenceView(PGChildNodeView):
|
||||
else:
|
||||
res['rows'][0][row['deftype']] = [priv]
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
return True, res['rows'][0]
|
||||
|
||||
@check_precondition(action="create")
|
||||
def create(self, gid, sid, did, scid):
|
||||
@@ -869,5 +887,30 @@ class SequenceView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition(action="fetch_objects_to_compare")
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the sequences for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
status, data = self._fetch_properties(scid, row['oid'])
|
||||
if status:
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
SequenceView.register_node_view(blueprint)
|
||||
|
||||
@@ -24,6 +24,8 @@ from pgadmin.utils.ajax import precondition_required
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -91,7 +93,7 @@ class SynonymModule(SchemaChildModule):
|
||||
blueprint = SynonymModule(__name__)
|
||||
|
||||
|
||||
class SynonymView(PGChildNodeView):
|
||||
class SynonymView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
This class is responsible for generating routes for Synonym node
|
||||
|
||||
@@ -143,6 +145,10 @@ class SynonymView(PGChildNodeView):
|
||||
* dependent(gid, sid, did, scid):
|
||||
- This function will generate dependent list to show it in dependent
|
||||
pane for the selected Synonym node.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the synonyms nodes from two
|
||||
different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -385,26 +391,36 @@ class SynonymView(PGChildNodeView):
|
||||
Returns:
|
||||
JSON of selected synonym node
|
||||
"""
|
||||
status, res = self._fetch_properties(scid, syid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, syid):
|
||||
"""
|
||||
This function is used to fetch the properties of the specified object
|
||||
:param scid:
|
||||
:param syid:
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
scid=scid, syid=syid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) > 0:
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return gone(
|
||||
if len(res['rows']) == 0:
|
||||
return False, gone(
|
||||
gettext('The specified synonym could not be found.')
|
||||
)
|
||||
|
||||
return True, res['rows'][0]
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@@ -707,5 +723,33 @@ class SynonymView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the list of all the synonyms for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
if self.manager.server_type != 'ppas':
|
||||
return res
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']), scid=scid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in rset['rows']:
|
||||
status, data = self._fetch_properties(scid, row['name'])
|
||||
if status:
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
SynonymView.register_node_view(blueprint)
|
||||
|
||||
@@ -11,9 +11,11 @@
|
||||
|
||||
import simplejson as json
|
||||
import re
|
||||
import copy
|
||||
import random
|
||||
|
||||
import pgadmin.browser.server_groups.servers.databases as database
|
||||
from flask import render_template, request, jsonify, url_for
|
||||
from flask import render_template, request, jsonify, url_for, current_app
|
||||
from flask_babelex import gettext
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.utils \
|
||||
import SchemaChildModule, DataTypeReader, VacuumSettings
|
||||
@@ -22,8 +24,15 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from .utils import BaseTableView
|
||||
from pgadmin.utils.preferences import Preferences
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
|
||||
directory_diff
|
||||
from pgadmin.tools.schema_diff.model import SchemaDiffModel
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.foreign_key import utils as fkey_utils
|
||||
from .schema_diff_utils import SchemaDiffTableCompare
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
columns import utils as column_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
@@ -132,7 +141,8 @@ class TableModule(SchemaChildModule):
|
||||
blueprint = TableModule(__name__)
|
||||
|
||||
|
||||
class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
class TableView(BaseTableView, DataTypeReader, VacuumSettings,
|
||||
SchemaDiffTableCompare):
|
||||
"""
|
||||
This class is responsible for generating routes for Table node
|
||||
|
||||
@@ -229,6 +239,10 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
|
||||
* delete_sql(gid, sid, did, scid, foid):
|
||||
- Returns sql for Script
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the table nodes from two
|
||||
different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -277,7 +291,8 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
'insert_sql': [{'get': 'insert_sql'}],
|
||||
'update_sql': [{'get': 'update_sql'}],
|
||||
'delete_sql': [{'get': 'delete_sql'}],
|
||||
'count_rows': [{'get': 'count_rows'}]
|
||||
'count_rows': [{'get': 'count_rows'}],
|
||||
'compare': [{'get': 'compare'}, {'get': 'compare'}]
|
||||
})
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
@@ -464,9 +479,9 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
- setting
|
||||
values
|
||||
"""
|
||||
res = self.get_vacuum_table_settings(self.conn)
|
||||
res = self.get_vacuum_table_settings(self.conn, sid)
|
||||
return ajax_response(
|
||||
response=res['rows'],
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
@@ -480,9 +495,9 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
- setting
|
||||
values
|
||||
"""
|
||||
res = self.get_vacuum_toast_settings(self.conn)
|
||||
res = self.get_vacuum_toast_settings(self.conn, sid)
|
||||
return ajax_response(
|
||||
response=res['rows'],
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
@@ -582,7 +597,22 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
Returns:
|
||||
JSON of selected table node
|
||||
"""
|
||||
status, res = self._fetch_properties(did, scid, tid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return super(TableView, self).properties(
|
||||
gid, sid, did, scid, tid, res
|
||||
)
|
||||
|
||||
def _fetch_properties(self, did, scid, tid):
|
||||
"""
|
||||
This function is used to fetch the properties of the specified object
|
||||
:param did:
|
||||
:param scid:
|
||||
:param tid:
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template(
|
||||
"/".join([self.table_template_path, 'properties.sql']),
|
||||
did=did, scid=scid, tid=tid,
|
||||
@@ -590,10 +620,11 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("The specified table could not be found."))
|
||||
return False, gone(
|
||||
gettext("The specified table could not be found."))
|
||||
|
||||
# We will check the threshold set by user before executing
|
||||
# the query because that can cause performance issues
|
||||
@@ -620,7 +651,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
status, count = self.conn.execute_scalar(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=count)
|
||||
return False, internal_server_error(errormsg=count)
|
||||
|
||||
res['rows'][0]['rows_cnt'] = count
|
||||
|
||||
@@ -628,9 +659,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
elif not estimated_row_count:
|
||||
res['rows'][0]['rows_cnt'] = estimated_row_count
|
||||
|
||||
return super(TableView, self).properties(
|
||||
gid, sid, did, scid, tid, res
|
||||
)
|
||||
return True, res
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def types(self, gid, sid, did, scid, tid=None, clid=None):
|
||||
@@ -1168,6 +1197,69 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
"""
|
||||
return BaseTableView.reset_statistics(self, scid, tid)
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def get_sql_from_table_diff(self, **kwargs):
|
||||
"""
|
||||
This function will create sql on the basis the difference of 2 tables
|
||||
"""
|
||||
data = dict()
|
||||
res = None
|
||||
sid = kwargs['sid']
|
||||
did = kwargs['did']
|
||||
scid = kwargs['scid']
|
||||
tid = kwargs['tid']
|
||||
diff_data = kwargs['diff_data'] if 'diff_data' in kwargs else None
|
||||
json_resp = kwargs['json_resp'] if 'json_resp' in kwargs else True
|
||||
diff_schema = kwargs['diff_schema'] if 'diff_schema' in kwargs else\
|
||||
None
|
||||
schema_diff_table = kwargs['schema_diff_table'] if\
|
||||
'schema_diff_table' in kwargs else None
|
||||
|
||||
if diff_data:
|
||||
return self._fetch_sql(did, scid, tid, diff_data, json_resp)
|
||||
else:
|
||||
main_sql = []
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.table_template_path, 'properties.sql']),
|
||||
did=did, scid=scid, tid=tid,
|
||||
datlastsysoid=self.datlastsysoid
|
||||
)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("The specified table could not be found."
|
||||
))
|
||||
|
||||
if status:
|
||||
data = res['rows'][0]
|
||||
|
||||
if diff_schema:
|
||||
data['schema'] = diff_schema
|
||||
|
||||
if schema_diff_table:
|
||||
data['orig_name'] = data['name']
|
||||
data['name'] = 'schema_diff_temp_{0}'.format(
|
||||
random.randint(1, 9999999))
|
||||
|
||||
sql, partition_sql = BaseTableView.get_reverse_engineered_sql(
|
||||
self, did, scid, tid, main_sql, data, json_resp,
|
||||
diff_partition_sql=True)
|
||||
else:
|
||||
sql, partition_sql = BaseTableView.get_reverse_engineered_sql(
|
||||
self, did, scid, tid, main_sql, data, json_resp)
|
||||
|
||||
if schema_diff_table:
|
||||
# If partition tables have different partitions
|
||||
sql += render_template(
|
||||
"/".join([self.table_template_path, 'schema_diff.sql']),
|
||||
conn=self.conn, data=data, partition_sql=partition_sql
|
||||
)
|
||||
|
||||
return sql
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def msql(self, gid, sid, did, scid, tid=None):
|
||||
"""
|
||||
@@ -1181,7 +1273,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
tid: Table ID
|
||||
"""
|
||||
data = dict()
|
||||
res = None
|
||||
SQL = ''
|
||||
for k, v in request.args.items():
|
||||
try:
|
||||
# comments should be taken as is because if user enters a
|
||||
@@ -1193,6 +1285,11 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
except (ValueError, TypeError, KeyError):
|
||||
data[k] = v
|
||||
|
||||
return self._fetch_sql(did, scid, tid, data)
|
||||
|
||||
def _fetch_sql(self, did, scid, tid, data, json_resp=True):
|
||||
res = None
|
||||
|
||||
if tid is not None:
|
||||
SQL = render_template(
|
||||
"/".join([self.table_template_path, 'properties.sql']),
|
||||
@@ -1201,13 +1298,18 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return internal_server_error(errormsg=SQL)
|
||||
|
||||
SQL, name = self.get_sql(did, scid, tid, data, res)
|
||||
SQL = re.sub('\n{2,}', '\n\n', SQL)
|
||||
SQL = SQL.strip('\n')
|
||||
|
||||
if not json_resp:
|
||||
return SQL
|
||||
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
|
||||
return make_json_response(
|
||||
data=SQL,
|
||||
status=200
|
||||
@@ -1419,7 +1521,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
return ajax_response(response=sql)
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def delete_sql(self, gid, sid, did, scid, tid):
|
||||
def delete_sql(self, gid, sid, did, scid, tid, json_resp=True):
|
||||
"""
|
||||
DELETE script sql for the object
|
||||
|
||||
@@ -1448,6 +1550,9 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
self.qtIdent(self.conn, data['schema'], data['name'])
|
||||
)
|
||||
|
||||
if not json_resp:
|
||||
return sql
|
||||
|
||||
return ajax_response(response=sql)
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
@@ -1502,5 +1607,60 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
data={'total_rows': count}
|
||||
)
|
||||
|
||||
def get_delete_sql(self, res):
|
||||
self.cmd = 'delete'
|
||||
sql = super(TableView, self).get_delete_sql(res)
|
||||
self.cmd = None
|
||||
return sql
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def fetch_tables(self, sid, did, scid, tid=None, keys_to_remove=None):
|
||||
"""
|
||||
This function will fetch the list of all the tables
|
||||
and will be used by schema diff.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:param tid: Table Id
|
||||
:param keys_to_remove: Table columns to be removed from the dataset
|
||||
:return: Table dataset
|
||||
"""
|
||||
if tid:
|
||||
status, data = self._fetch_properties(did, scid, tid)
|
||||
|
||||
if not status:
|
||||
current_app.logger.error(data)
|
||||
return False
|
||||
|
||||
data = super(TableView, self).properties(
|
||||
0, sid, did, scid, tid, data, False
|
||||
)
|
||||
self.remove_keys_for_comparision(data, keys_to_remove)
|
||||
return data
|
||||
|
||||
else:
|
||||
res = dict()
|
||||
SQL = render_template("/".join([self.table_template_path,
|
||||
'nodes.sql']), scid=scid)
|
||||
status, tables = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
current_app.logger.error(tables)
|
||||
return False
|
||||
|
||||
for row in tables['rows']:
|
||||
status, data = self._fetch_properties(did, scid, row['oid'])
|
||||
|
||||
if status:
|
||||
data = super(TableView, self).properties(
|
||||
0, sid, did, scid, row['oid'], data, False
|
||||
)
|
||||
|
||||
self.remove_keys_for_comparision(data, keys_to_remove)
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
SchemaDiffRegistry(blueprint.node_type, TableView)
|
||||
TableView.register_node_view(blueprint)
|
||||
|
||||
@@ -29,6 +29,7 @@ from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
from pgadmin.utils.ajax import ColParamsJSONDecoder
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
unicode = str
|
||||
|
||||
@@ -26,6 +26,10 @@ from pgadmin.browser.server_groups.servers.databases.schemas.utils \
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
from pgadmin.utils.compile_template_name import compile_template_path
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
unicode = str
|
||||
@@ -155,7 +159,7 @@ class CompoundTriggerModule(CollectionNodeModule):
|
||||
blueprint = CompoundTriggerModule(__name__)
|
||||
|
||||
|
||||
class CompoundTriggerView(PGChildNodeView):
|
||||
class CompoundTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
This class is responsible for generating routes for Compound Trigger node
|
||||
|
||||
@@ -245,6 +249,10 @@ class CompoundTriggerView(PGChildNodeView):
|
||||
'enable': [{'put': 'enable_disable_trigger'}]
|
||||
})
|
||||
|
||||
# Schema Diff: Keys to ignore while comparing
|
||||
keys_to_ignore = ['oid', 'xmin', 'nspname', 'tfunction',
|
||||
'tgrelid', 'tgfoid']
|
||||
|
||||
def check_precondition(f):
|
||||
"""
|
||||
This function will behave as a decorator which will checks
|
||||
@@ -267,6 +275,12 @@ class CompoundTriggerView(PGChildNodeView):
|
||||
]['datlastsysoid'] if self.manager.db_info is not None and \
|
||||
kwargs['did'] in self.manager.db_info else 0
|
||||
|
||||
self.table_template_path = compile_template_path(
|
||||
'tables/sql',
|
||||
self.manager.server_type,
|
||||
self.manager.version
|
||||
)
|
||||
|
||||
# we will set template path for sql scripts
|
||||
self.template_path = 'compound_triggers/sql/{0}/#{1}#'.format(
|
||||
self.manager.server_type, self.manager.version)
|
||||
@@ -417,6 +431,18 @@ class CompoundTriggerView(PGChildNodeView):
|
||||
JSON of selected compound trigger node
|
||||
"""
|
||||
|
||||
data = self._fetch_properties(tid, trid)
|
||||
|
||||
if not status:
|
||||
return data
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, tid, trid):
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
tid=tid, trid=trid,
|
||||
@@ -440,10 +466,7 @@ class CompoundTriggerView(PGChildNodeView):
|
||||
|
||||
data = trigger_definition(data)
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
status=200
|
||||
)
|
||||
return True, data
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did, scid, tid):
|
||||
@@ -519,7 +542,7 @@ class CompoundTriggerView(PGChildNodeView):
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, tid, trid=None):
|
||||
def delete(self, gid, sid, did, scid, tid, trid=None, only_sql=False):
|
||||
"""
|
||||
This function will updates existing the compound trigger object
|
||||
|
||||
@@ -579,6 +602,9 @@ class CompoundTriggerView(PGChildNodeView):
|
||||
conn=self.conn,
|
||||
cascade=cascade
|
||||
)
|
||||
if only_sql:
|
||||
return SQL
|
||||
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@@ -846,5 +872,109 @@ class CompoundTriggerView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def get_sql_from_diff(self, gid, sid, did, scid, tid, oid,
|
||||
data=None, diff_schema=None, drop_sql=False):
|
||||
if data:
|
||||
sql, name = self.get_sql(scid, tid, oid, data)
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
else:
|
||||
if drop_sql:
|
||||
SQL = self.delete(gid=gid, sid=sid, did=did,
|
||||
scid=scid, tid=tid,
|
||||
trid=oid, only_sql=True)
|
||||
else:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
tid=tid, trid=oid,
|
||||
datlastsysoid=self.datlastsysoid)
|
||||
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the compound
|
||||
trigger in the table."""))
|
||||
|
||||
data = dict(res['rows'][0])
|
||||
# Adding parent into data dict,
|
||||
# will be using it while creating sql
|
||||
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
|
||||
if len(data['tgattr']) >= 1:
|
||||
columns = ', '.join(data['tgattr'].split(' '))
|
||||
data['columns'] = self._column_details(tid, columns)
|
||||
|
||||
data = self._trigger_definition(data)
|
||||
|
||||
if diff_schema:
|
||||
data['schema'] = diff_schema
|
||||
|
||||
SQL, name = self.get_sql(scid, tid, None, data)
|
||||
|
||||
sql_header = u"-- Compound Trigger: {0}\n\n-- ".format(
|
||||
data['name'])
|
||||
|
||||
sql_header += render_template("/".join([self.template_path,
|
||||
'delete.sql']),
|
||||
data=data, conn=self.conn)
|
||||
|
||||
SQL = sql_header + '\n\n' + SQL.strip('\n')
|
||||
|
||||
# If compound trigger is disbaled then add sql
|
||||
# code for the same
|
||||
if not data['is_enable_trigger']:
|
||||
SQL += '\n\n'
|
||||
SQL += render_template("/".join([
|
||||
self.template_path,
|
||||
'enable_disable_trigger.sql']),
|
||||
data=data, conn=self.conn)
|
||||
|
||||
return SQL
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
|
||||
ignore_keys=False):
|
||||
"""
|
||||
This function will fetch the list of all the triggers for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:param tid: Table Id
|
||||
:return:
|
||||
"""
|
||||
res = dict()
|
||||
|
||||
if oid:
|
||||
status, data = self._fetch_properties(tid, oid)
|
||||
if not status:
|
||||
current_app.logger.error(data)
|
||||
return False
|
||||
res = data
|
||||
else:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), tid=tid)
|
||||
status, triggers = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
current_app.logger.error(triggers)
|
||||
return False
|
||||
|
||||
for row in triggers['rows']:
|
||||
status, data = self._fetch_properties(tid, row['oid'])
|
||||
if status:
|
||||
if ignore_keys:
|
||||
for key in self.keys_to_ignore:
|
||||
if key in data:
|
||||
del data[key]
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
|
||||
SchemaDiffRegistry(blueprint.node_type, CompoundTriggerView, 'table')
|
||||
CompoundTriggerView.register_node_view(blueprint)
|
||||
|
||||
@@ -152,7 +152,8 @@ def get_index_constraint_sql(conn, did, tid, data, template_path=None):
|
||||
|
||||
modified_sql, name = get_sql(conn, c, did, tid, ctype,
|
||||
c['oid'])
|
||||
sql.append(modified_sql.strip('\n'))
|
||||
if modified_sql:
|
||||
sql.append(modified_sql.strip('\n'))
|
||||
|
||||
if 'added' in constraint:
|
||||
for c in constraint['added']:
|
||||
@@ -183,6 +184,7 @@ def get_sql(conn, data, did, tid, ctype, cid=None, template_path=None):
|
||||
:return:
|
||||
"""
|
||||
name = data['name'] if 'name' in data else None
|
||||
sql = None
|
||||
if cid is not None:
|
||||
sql = render_template("/".join([template_path, 'properties.sql']),
|
||||
did=did, tid=tid, cid=cid,
|
||||
|
||||
@@ -13,7 +13,7 @@ import simplejson as json
|
||||
from functools import wraps
|
||||
|
||||
import pgadmin.browser.server_groups.servers.databases as database
|
||||
from flask import render_template, request, jsonify
|
||||
from flask import render_template, request, jsonify, current_app
|
||||
from flask_babelex import gettext
|
||||
from pgadmin.browser.collection import CollectionNodeModule
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
@@ -25,8 +25,14 @@ from pgadmin.utils.compile_template_name import compile_template_path
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
|
||||
directory_diff
|
||||
from pgadmin.tools.schema_diff.model import SchemaDiffModel
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas. \
|
||||
tables.indexes import utils as index_utils
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
unicode = str
|
||||
@@ -135,7 +141,7 @@ class IndexesModule(CollectionNodeModule):
|
||||
blueprint = IndexesModule(__name__)
|
||||
|
||||
|
||||
class IndexesView(PGChildNodeView):
|
||||
class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
This class is responsible for generating routes for Index node
|
||||
|
||||
@@ -227,6 +233,11 @@ class IndexesView(PGChildNodeView):
|
||||
{'get': 'get_op_class'}]
|
||||
})
|
||||
|
||||
# Schema Diff: Keys to ignore while comparing
|
||||
keys_to_ignore = ['oid', 'relowner', 'schema',
|
||||
'indrelid', 'nspname'
|
||||
]
|
||||
|
||||
def check_precondition(f):
|
||||
"""
|
||||
This function will behave as a decorator which will checks
|
||||
@@ -248,6 +259,12 @@ class IndexesView(PGChildNodeView):
|
||||
]['datlastsysoid'] if self.manager.db_info is not None and \
|
||||
kwargs['did'] in self.manager.db_info else 0
|
||||
|
||||
self.table_template_path = compile_template_path(
|
||||
'tables/sql',
|
||||
self.manager.server_type,
|
||||
self.manager.version
|
||||
)
|
||||
|
||||
# we will set template path for sql scripts
|
||||
self.template_path = compile_template_path(
|
||||
'indexes/sql/',
|
||||
@@ -485,19 +502,35 @@ class IndexesView(PGChildNodeView):
|
||||
Returns:
|
||||
JSON of selected schema node
|
||||
"""
|
||||
status, data = self._fetch_properties(did, tid, idx)
|
||||
if not status:
|
||||
return data
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, did, tid, idx):
|
||||
"""
|
||||
This function is used to fetch the properties of specified object.
|
||||
:param did:
|
||||
:param tid:
|
||||
:param idx:
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
did=did, tid=tid, idx=idx, datlastsysoid=self.datlastsysoid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the index in the table."""))
|
||||
return False, gone(
|
||||
gettext("""Could not find the index in the table."""))
|
||||
|
||||
# Making copy of output for future use
|
||||
data = dict(res['rows'][0])
|
||||
@@ -509,10 +542,7 @@ class IndexesView(PGChildNodeView):
|
||||
if self.manager.version >= 110000:
|
||||
data = index_utils.get_include_details(self.conn, idx, data)
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
status=200
|
||||
)
|
||||
return True, data
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did, scid, tid):
|
||||
@@ -620,7 +650,8 @@ class IndexesView(PGChildNodeView):
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, tid, idx=None):
|
||||
def delete(self, gid, sid, did, scid, tid, idx=None,
|
||||
only_sql=False):
|
||||
"""
|
||||
This function will updates existing the schema object
|
||||
|
||||
@@ -676,6 +707,9 @@ class IndexesView(PGChildNodeView):
|
||||
"/".join([self.template_path, 'delete.sql']),
|
||||
data=data, conn=self.conn, cascade=cascade
|
||||
)
|
||||
|
||||
if only_sql:
|
||||
return SQL
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@@ -792,6 +826,32 @@ class IndexesView(PGChildNodeView):
|
||||
|
||||
return ajax_response(response=SQL)
|
||||
|
||||
@check_precondition
|
||||
def get_sql_from_index_diff(self, sid, did, scid, tid, idx, data=None,
|
||||
diff_schema=None, drop_req=False):
|
||||
|
||||
tmp_idx = idx
|
||||
schema = ''
|
||||
if data:
|
||||
schema = self.schema
|
||||
elif diff_schema:
|
||||
schema = diff_schema
|
||||
|
||||
sql = index_utils.get_reverse_engineered_sql(
|
||||
self.conn, schema,
|
||||
self.table, did, tid, idx,
|
||||
self.datlastsysoid,
|
||||
template_path=None, with_header=False)
|
||||
|
||||
drop_sql = ''
|
||||
if drop_req:
|
||||
drop_sql = '\n' + render_template(
|
||||
"/".join([self.template_path, 'delete.sql']),
|
||||
data=data, conn=self.conn
|
||||
)
|
||||
|
||||
return drop_sql + '\n\n' + sql
|
||||
|
||||
@check_precondition
|
||||
def dependents(self, gid, sid, did, scid, tid, idx):
|
||||
"""
|
||||
@@ -914,5 +974,129 @@ class IndexesView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
|
||||
ignore_keys=False):
|
||||
"""
|
||||
This function will fetch the list of all the indexes for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:return:
|
||||
"""
|
||||
|
||||
res = dict()
|
||||
|
||||
if not oid:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), tid=tid)
|
||||
status, indexes = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
current_app.logger.error(indexes)
|
||||
return False
|
||||
|
||||
for row in indexes['rows']:
|
||||
status, data = self._fetch_properties(did, tid,
|
||||
row['oid'])
|
||||
if status:
|
||||
if ignore_keys:
|
||||
for key in self.keys_to_ignore:
|
||||
if key in data:
|
||||
del data[key]
|
||||
res[row['name']] = data
|
||||
else:
|
||||
status, data = self._fetch_properties(did, tid,
|
||||
oid)
|
||||
if not status:
|
||||
current_app.logger.error(data)
|
||||
return False
|
||||
res = data
|
||||
|
||||
return res
|
||||
|
||||
def ddl_compare(self, **kwargs):
|
||||
"""
|
||||
This function will compare index properties and
|
||||
return the difference of SQL
|
||||
"""
|
||||
|
||||
src_sid = kwargs.get('source_sid')
|
||||
src_did = kwargs.get('source_did')
|
||||
src_scid = kwargs.get('source_scid')
|
||||
src_tid = kwargs.get('source_tid')
|
||||
src_oid = kwargs.get('source_oid')
|
||||
tar_sid = kwargs.get('target_sid')
|
||||
tar_did = kwargs.get('target_did')
|
||||
tar_scid = kwargs.get('target_scid')
|
||||
tar_tid = kwargs.get('target_tid')
|
||||
tar_oid = kwargs.get('target_oid')
|
||||
comp_status = kwargs.get('comp_status')
|
||||
|
||||
source = ''
|
||||
target = ''
|
||||
diff = ''
|
||||
|
||||
status, target_schema = self.get_schema(tar_sid,
|
||||
tar_did,
|
||||
tar_scid
|
||||
)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=target_schema)
|
||||
|
||||
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
|
||||
diff = self.get_sql_from_index_diff(sid=src_sid,
|
||||
did=src_did, scid=src_scid,
|
||||
tid=src_tid, idx=src_oid,
|
||||
diff_schema=target_schema)
|
||||
|
||||
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
|
||||
diff = self.delete(gid=1, sid=tar_sid, did=tar_did,
|
||||
scid=tar_scid, tid=tar_tid,
|
||||
idx=tar_oid, only_sql=True)
|
||||
|
||||
else:
|
||||
source = self.fetch_objects_to_compare(sid=src_sid, did=src_did,
|
||||
scid=src_scid, tid=src_tid,
|
||||
oid=src_oid)
|
||||
target = self.fetch_objects_to_compare(sid=tar_sid, did=tar_did,
|
||||
scid=tar_scid, tid=tar_tid,
|
||||
oid=tar_oid)
|
||||
|
||||
if not (source or target):
|
||||
return None
|
||||
|
||||
diff_dict = directory_diff(
|
||||
source, target, ignore_keys=self.keys_to_ignore,
|
||||
difference={}
|
||||
)
|
||||
|
||||
required_create_keys = ['columns']
|
||||
create_req = False
|
||||
|
||||
for key in required_create_keys:
|
||||
if key in diff_dict:
|
||||
create_req = True
|
||||
|
||||
if create_req:
|
||||
diff = self.get_sql_from_index_diff(sid=src_sid,
|
||||
did=src_did,
|
||||
scid=src_scid,
|
||||
tid=src_tid,
|
||||
idx=src_oid,
|
||||
diff_schema=target_schema,
|
||||
drop_req=True)
|
||||
else:
|
||||
diff = self.get_sql_from_index_diff(sid=tar_sid,
|
||||
did=tar_did,
|
||||
scid=tar_scid,
|
||||
tid=tar_tid,
|
||||
idx=tar_oid,
|
||||
data=diff_dict)
|
||||
|
||||
return diff
|
||||
|
||||
|
||||
SchemaDiffRegistry(blueprint.node_type, IndexesView, 'table')
|
||||
IndexesView.register_node_view(blueprint)
|
||||
|
||||
@@ -26,7 +26,7 @@ def get_template_path(f):
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold the connection object
|
||||
conn_obj = args[0]
|
||||
if 'template_path' not in kwargs:
|
||||
if 'template_path' not in kwargs or kwargs['template_path'] is None:
|
||||
kwargs['template_path'] = \
|
||||
'indexes/sql/#{0}#'.format(conn_obj.manager.version)
|
||||
|
||||
@@ -229,7 +229,7 @@ def get_sql(conn, data, did, tid, idx, datlastsysoid,
|
||||
@get_template_path
|
||||
def get_reverse_engineered_sql(conn, schema, table, did, tid, idx,
|
||||
datlastsysoid,
|
||||
template_path=None):
|
||||
template_path=None, with_header=True):
|
||||
"""
|
||||
This function will return reverse engineered sql for specified trigger.
|
||||
|
||||
@@ -240,6 +240,8 @@ def get_reverse_engineered_sql(conn, schema, table, did, tid, idx,
|
||||
:param idx: Index ID
|
||||
:param datlastsysoid:
|
||||
:param template_path: Optional template path
|
||||
:param with_header: Optional parameter to decide whether the SQL will be
|
||||
returned with header or not
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join([template_path, 'properties.sql']),
|
||||
@@ -267,11 +269,12 @@ def get_reverse_engineered_sql(conn, schema, table, did, tid, idx,
|
||||
|
||||
SQL, name = get_sql(conn, data, did, tid, None, datlastsysoid)
|
||||
|
||||
sql_header = u"-- Index: {0}\n\n-- ".format(data['name'])
|
||||
if with_header:
|
||||
sql_header = u"-- Index: {0}\n\n-- ".format(data['name'])
|
||||
|
||||
sql_header += render_template("/".join([template_path, 'delete.sql']),
|
||||
data=data, conn=conn)
|
||||
sql_header += render_template("/".join([template_path, 'delete.sql']),
|
||||
data=data, conn=conn)
|
||||
|
||||
SQL = sql_header + '\n\n' + SQL
|
||||
SQL = sql_header + '\n\n' + SQL
|
||||
|
||||
return SQL
|
||||
|
||||
@@ -24,6 +24,11 @@ from pgadmin.browser.collection import CollectionNodeModule
|
||||
from pgadmin.utils.ajax import make_json_response, precondition_required
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.browser.utils import PGChildModule
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
|
||||
directory_diff
|
||||
from pgadmin.tools.schema_diff.model import SchemaDiffModel
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
|
||||
def backend_supported(module, manager, **kwargs):
|
||||
@@ -152,7 +157,8 @@ class PartitionsModule(CollectionNodeModule):
|
||||
blueprint = PartitionsModule(__name__)
|
||||
|
||||
|
||||
class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings,
|
||||
SchemaDiffObjectCompare):
|
||||
"""
|
||||
This class is responsible for generating routes for Partition node
|
||||
|
||||
@@ -200,6 +206,10 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
|
||||
})
|
||||
|
||||
# Schema Diff: Keys to ignore while comparing
|
||||
keys_to_ignore = ['oid', 'schema', 'vacuum_table',
|
||||
'vacuum_toast', 'edit_types']
|
||||
|
||||
def get_children_nodes(self, manager, **kwargs):
|
||||
nodes = []
|
||||
# treat partition table as normal table.
|
||||
@@ -342,6 +352,63 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
return super(PartitionsView, self).properties(
|
||||
gid, sid, did, scid, ptid, res)
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid, tid, ptid=None):
|
||||
"""
|
||||
This function will fetch the list of all the tables for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:param tid: Table Id
|
||||
:param ptif: Partition table Id
|
||||
:return:
|
||||
"""
|
||||
res = {}
|
||||
|
||||
if ptid:
|
||||
SQL = render_template("/".join([self.partition_template_path,
|
||||
'properties.sql']),
|
||||
did=did, scid=scid, tid=tid,
|
||||
ptid=ptid, datlastsysoid=self.datlastsysoid)
|
||||
status, result = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
current_app.logger.error(result)
|
||||
return False
|
||||
|
||||
res = super(PartitionsView, self).properties(
|
||||
0, sid, did, scid, ptid, result)
|
||||
|
||||
else:
|
||||
SQL = render_template(
|
||||
"/".join([self.partition_template_path, 'nodes.sql']),
|
||||
scid=scid, tid=tid
|
||||
)
|
||||
status, partitions = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
current_app.logger.error(partitions)
|
||||
return False
|
||||
|
||||
for row in partitions['rows']:
|
||||
SQL = render_template("/".join([self.partition_template_path,
|
||||
'properties.sql']),
|
||||
did=did, scid=scid, tid=tid,
|
||||
ptid=row['oid'],
|
||||
datlastsysoid=self.datlastsysoid)
|
||||
status, result = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
current_app.logger.error(result)
|
||||
return False
|
||||
|
||||
data = super(PartitionsView, self).properties(
|
||||
0, sid, did, scid, row['oid'], result, False
|
||||
)
|
||||
res[row['name']] = data
|
||||
|
||||
return res
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def sql(self, gid, sid, did, scid, tid, ptid):
|
||||
"""
|
||||
@@ -375,6 +442,62 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
return BaseTableView.get_reverse_engineered_sql(self, did, scid, ptid,
|
||||
main_sql, data)
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def get_sql_from_diff(self, **kwargs):
|
||||
"""
|
||||
This function will create sql on the basis the difference of 2 tables
|
||||
"""
|
||||
data = dict()
|
||||
res = None
|
||||
sid = kwargs['sid']
|
||||
did = kwargs['did']
|
||||
scid = kwargs['scid']
|
||||
tid = kwargs['tid']
|
||||
ptid = kwargs['ptid']
|
||||
diff_data = kwargs['diff_data'] if 'diff_data' in kwargs else None
|
||||
json_resp = kwargs['json_resp'] if 'json_resp' in kwargs else True
|
||||
diff_schema = kwargs['diff_schema'] if 'diff_schema' in kwargs else\
|
||||
None
|
||||
|
||||
if diff_data:
|
||||
SQL = render_template("/".join([self.partition_template_path,
|
||||
'properties.sql']),
|
||||
did=did, scid=scid, tid=tid,
|
||||
ptid=ptid, datlastsysoid=self.datlastsysoid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
SQL, name = self.get_sql(did, scid, ptid, diff_data, res)
|
||||
SQL = re.sub('\n{2,}', '\n\n', SQL)
|
||||
SQL = SQL.strip('\n')
|
||||
return SQL
|
||||
else:
|
||||
main_sql = []
|
||||
|
||||
SQL = render_template("/".join([self.partition_template_path,
|
||||
'properties.sql']),
|
||||
did=did, scid=scid, tid=tid,
|
||||
ptid=ptid, datlastsysoid=self.datlastsysoid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext(
|
||||
"The specified partitioned table could not be found."))
|
||||
|
||||
data = res['rows'][0]
|
||||
|
||||
if diff_schema:
|
||||
data['schema'] = diff_schema
|
||||
data['parent_schema'] = diff_schema
|
||||
|
||||
return BaseTableView.get_reverse_engineered_sql(self, did,
|
||||
scid, ptid,
|
||||
main_sql, data,
|
||||
False)
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def detach(self, gid, sid, did, scid, tid, ptid):
|
||||
"""
|
||||
@@ -576,7 +699,7 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@BaseTableView.check_precondition
|
||||
def delete(self, gid, sid, did, scid, tid, ptid=None):
|
||||
def delete(self, gid, sid, did, scid, tid, ptid=None, only_sql=False):
|
||||
"""
|
||||
This function will delete the table object
|
||||
|
||||
@@ -631,5 +754,61 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def ddl_compare(self, **kwargs):
|
||||
"""
|
||||
This function will compare index properties and
|
||||
return the difference of SQL
|
||||
"""
|
||||
|
||||
src_sid = kwargs.get('source_sid')
|
||||
src_did = kwargs.get('source_did')
|
||||
src_scid = kwargs.get('source_scid')
|
||||
src_tid = kwargs.get('source_tid')
|
||||
src_oid = kwargs.get('source_oid')
|
||||
tar_sid = kwargs.get('target_sid')
|
||||
tar_did = kwargs.get('target_did')
|
||||
tar_scid = kwargs.get('target_scid')
|
||||
tar_tid = kwargs.get('target_tid')
|
||||
tar_oid = kwargs.get('target_oid')
|
||||
comp_status = kwargs.get('comp_status')
|
||||
|
||||
source = ''
|
||||
target = ''
|
||||
diff = ''
|
||||
|
||||
status, target_schema = self.get_schema_for_schema_diff(tar_sid,
|
||||
tar_did,
|
||||
tar_scid
|
||||
)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=target_schema)
|
||||
|
||||
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
|
||||
diff = self.get_sql_from_diff(sid=src_sid,
|
||||
did=src_did, scid=src_scid,
|
||||
tid=src_tid, ptid=src_oid,
|
||||
diff_schema=target_schema)
|
||||
|
||||
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
|
||||
SQL = render_template("/".join([self.partition_template_path,
|
||||
'properties.sql']),
|
||||
did=did, scid=scid, tid=tid,
|
||||
ptid=ptid, datlastsysoid=self.datlastsysoid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.table_template_path, 'properties.sql']),
|
||||
did=tar_did, scid=tar_scid, tid=tar_oid,
|
||||
datlastsysoid=self.datlastsysoid
|
||||
)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if status:
|
||||
self.cmd = 'delete'
|
||||
diff = super(PartitionsView, self).get_delete_sql(res)
|
||||
self.cmd = None
|
||||
|
||||
return diff
|
||||
|
||||
|
||||
# Register the partition view with the Schema Diff tool (grouped under the
# parent 'table' node type) so partitions take part in schema comparison,
# then register the regular browser-node routes.
SchemaDiffRegistry(blueprint.node_type, PartitionsView, 'table')
PartitionsView.register_node_view(blueprint)
|
||||
|
||||
@@ -13,7 +13,8 @@ import simplejson as json
|
||||
from functools import wraps
|
||||
|
||||
import pgadmin.browser.server_groups.servers.databases.schemas as schemas
|
||||
from flask import render_template, make_response, request, jsonify
|
||||
from flask import render_template, make_response, request, jsonify,\
|
||||
current_app
|
||||
from flask_babelex import gettext
|
||||
from pgadmin.browser.collection import CollectionNodeModule
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.utils import \
|
||||
@@ -23,7 +24,11 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils.compile_template_name import compile_template_path
|
||||
from pgadmin.utils import IS_PY2
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
unicode = str
|
||||
@@ -134,7 +139,7 @@ class RuleModule(CollectionNodeModule):
|
||||
blueprint = RuleModule(__name__)
|
||||
|
||||
|
||||
class RuleView(PGChildNodeView):
|
||||
class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
This is a class for rule node which inherits the
|
||||
properties and methods from PGChildNodeView class and define
|
||||
@@ -178,6 +183,9 @@ class RuleView(PGChildNodeView):
|
||||
'configs': [{'get': 'configs'}]
|
||||
})
|
||||
|
||||
# Schema Diff: Keys to ignore while comparing
|
||||
keys_to_ignore = ['oid', 'schema', 'definition']
|
||||
|
||||
def check_precondition(f):
|
||||
"""
|
||||
This function will behave as a decorator which will check the
|
||||
@@ -197,6 +205,12 @@ class RuleView(PGChildNodeView):
|
||||
]['datlastsysoid'] if self.manager.db_info is not None and \
|
||||
kwargs['did'] in self.manager.db_info else 0
|
||||
self.template_path = 'rules/sql'
|
||||
self.table_template_path = compile_template_path(
|
||||
'tables/sql',
|
||||
self.manager.server_type,
|
||||
self.manager.version
|
||||
)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
return wrap
|
||||
@@ -278,6 +292,21 @@ class RuleView(PGChildNodeView):
|
||||
"""
|
||||
Fetch the properties of an individual rule and render in properties tab
|
||||
|
||||
"""
|
||||
status, data = self._fetch_properties(rid)
|
||||
if not status:
|
||||
return data
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, rid):
|
||||
"""
|
||||
This function is used to fetch the properties of the specified object
|
||||
:param rid:
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'properties.sql']
|
||||
@@ -285,15 +314,13 @@ class RuleView(PGChildNodeView):
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the rule in the table."""))
|
||||
return False, gone(
|
||||
gettext("""Could not find the rule in the table."""))
|
||||
|
||||
return ajax_response(
|
||||
response=parse_rule_definition(res),
|
||||
status=200
|
||||
)
|
||||
return True, parse_rule_definition(res)
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did, scid, tid):
|
||||
@@ -369,7 +396,7 @@ class RuleView(PGChildNodeView):
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, tid, rid=None):
|
||||
def delete(self, gid, sid, did, scid, tid, rid=None, only_sql=False):
|
||||
"""
|
||||
This function will drop a rule object
|
||||
"""
|
||||
@@ -412,6 +439,8 @@ class RuleView(PGChildNodeView):
|
||||
nspname=rset['nspname'],
|
||||
cascade=cascade
|
||||
)
|
||||
if only_sql:
|
||||
return SQL
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@@ -489,6 +518,44 @@ class RuleView(PGChildNodeView):
|
||||
[self.template_path, 'create.sql']), data=data)
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
|
||||
@check_precondition
|
||||
def get_sql_from_diff(self, gid, sid, did, scid, tid, oid, data=None,
|
||||
diff_schema=None, drop_sql=False):
|
||||
|
||||
if drop_sql:
|
||||
SQL = self.delete(gid=gid, sid=sid, did=did,
|
||||
scid=scid, tid=tid,
|
||||
rid=oid, only_sql=True)
|
||||
else:
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'properties.sql']), rid=oid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("""Could not find the rule in the table.""")
|
||||
)
|
||||
res_data = parse_rule_definition(res)
|
||||
|
||||
SQL = ''
|
||||
|
||||
if data:
|
||||
old_data = res_data
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data
|
||||
)
|
||||
else:
|
||||
if diff_schema:
|
||||
res_data['schema'] = diff_schema
|
||||
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'create.sql']),
|
||||
data=res_data, display_comments=True)
|
||||
|
||||
return SQL
|
||||
|
||||
@check_precondition
|
||||
def dependents(self, gid, sid, did, scid, tid, rid):
|
||||
"""
|
||||
@@ -527,5 +594,47 @@ class RuleView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
|
||||
ignore_keys=False):
|
||||
"""
|
||||
This function will fetch the list of all the rules for
|
||||
specified schema id.
|
||||
|
||||
:param sid: Server Id
|
||||
:param did: Database Id
|
||||
:param scid: Schema Id
|
||||
:param tid: Table Id
|
||||
:return:
|
||||
"""
|
||||
|
||||
res = {}
|
||||
if oid:
|
||||
status, data = self._fetch_properties(oid)
|
||||
if not status:
|
||||
current_app.logger.error(data)
|
||||
return False
|
||||
|
||||
res = data
|
||||
else:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']),
|
||||
tid=tid)
|
||||
status, rules = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
current_app.logger.error(rules)
|
||||
return False
|
||||
|
||||
for row in rules['rows']:
|
||||
status, data = self._fetch_properties(row['oid'])
|
||||
if status:
|
||||
if ignore_keys:
|
||||
for key in self.keys_to_ignore:
|
||||
if key in data:
|
||||
del data[key]
|
||||
res[row['name']] = data
|
||||
return res
|
||||
|
||||
|
||||
# Register the rule view with the Schema Diff tool (grouped under the parent
# 'table' node type) so rules take part in schema comparison, then register
# the regular browser-node routes.
SchemaDiffRegistry(blueprint.node_type, RuleView, 'table')
RuleView.register_node_view(blueprint)
|
||||
|
||||
@@ -0,0 +1,507 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
""" Implements Utility class for Table and Partitioned Table. """
|
||||
|
||||
import copy
|
||||
|
||||
from flask import render_template
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
|
||||
directory_diff
|
||||
from pgadmin.tools.schema_diff.model import SchemaDiffModel
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
|
||||
|
||||
class SchemaDiffTableCompare(SchemaDiffObjectCompare):
    """Schema Diff comparison mixin for table-like nodes.

    Supplies table-level compare()/ddl_compare() used by the Schema Diff
    tool, plus the key lists below that control which attributes take
    part in each kind of comparison.
    """

    # Attributes excluded from the attribute-by-attribute table comparison
    # (oids and statistics never match across servers).
    keys_to_ignore = ['oid', 'schema', 'vacuum_table',
                      'vacuum_toast', 'edit_types', 'attnum', 'col_type',
                      'references', 'reltuples', 'rows_cnt']

    # Attributes excluded when computing the DDL difference; columns and
    # constraints are diffed separately (table_col_ddl_comp /
    # constraint_ddl_comp).
    keys_to_ignore_ddl_comp = ['oid',
                               'schema',
                               'columns',
                               'edit_types',
                               'primary_key',
                               'exclude_constraint',
                               'check_constraint',
                               'foreign_key',
                               'reltuples',
                               'rows_cnt'
                               ]

    # Per-collection keys stripped from fetched table data before the
    # plain comparison.
    keys_to_remove = {
        'columns': ['relname', 'nspname', 'parent_tbl', 'attrelid', 'adrelid'],
        'primary_key': ['oid'],
        'unique_constraint': ['oid'],
        'check_constraint': ['oid', 'nspname'],
        'foreign_key': ['oid', 'fknsp', 'confrelid'],
        'exclude_constraint': ['oid'],
        'partitions': ['oid'],
    }

    # Per-collection keys stripped before the DDL comparison.
    keys_to_remove_ddl_comp = {
        'columns': ['relname', 'nspname', 'parent_tbl', 'attrelid', 'adrelid'],
        'check_constraint': ['nspname'],
        'foreign_key': ['fknsp', 'confrelid']
    }
|
||||
|
||||
    def compare(self, **kwargs):
        """
        This function is used to compare all the table objects
        from two different schemas.

        Fetches every table from the source and target schemas, attaches
        the data of each registered sub-module (index, rule, trigger and,
        on PG >= 12, compound_trigger) to each table, then delegates the
        actual diff to compare_dictionaries.

        :return: Comparison Dictionary, or None when both schemas have no
                 tables.
        """
        src_sid = kwargs.get('source_sid')
        src_did = kwargs.get('source_did')
        src_scid = kwargs.get('source_scid')
        tar_sid = kwargs.get('target_sid')
        tar_did = kwargs.get('target_did')
        tar_scid = kwargs.get('target_scid')
        sub_modules = ['index', 'rule', 'trigger']

        source_tables = self.fetch_tables(sid=src_sid, did=src_did,
                                          scid=src_scid)

        target_tables = self.fetch_tables(sid=tar_sid, did=tar_did,
                                          scid=tar_scid)

        # NOTE(review): version check uses self.manager -- presumably the
        # manager of the most recently fetched side; confirm both servers
        # are expected to be the same major version here.
        if self.manager.version >= 120000:
            sub_modules.append('compound_trigger')

        # If both the dict have no items then return None.
        if not (source_tables or target_tables) or (
                len(source_tables) <= 0 and len(target_tables) <= 0):
            return None

        src_server_type, tar_server_type = self.get_server_type(src_sid,
                                                                tar_sid)
        for module in sub_modules:

            # Resolve the registered view for this sub-module node type.
            module_view = SchemaDiffRegistry.get_node_view(
                module)

            # Get sub module data for source tables
            # (only when the sub-module supports the source server type;
            # results are attached in place under the module name).
            if module_view.blueprint.server_type is None or \
                    src_server_type in module_view.blueprint.server_type:
                for key, val in source_tables.items():
                    source = module_view.fetch_objects_to_compare(
                        sid=src_sid,
                        did=src_did,
                        scid=src_scid,
                        tid=val['oid'],
                        oid=None,
                        ignore_keys=True
                    )
                    source_tables[key][module] = source

            # Get sub module data for target tables
            if module_view.blueprint.server_type is None or \
                    tar_server_type in module_view.blueprint.server_type:
                for key, val in target_tables.items():
                    target = module_view.fetch_objects_to_compare(
                        sid=tar_sid,
                        did=tar_did,
                        scid=tar_scid,
                        tid=val['oid'],
                        oid=None,
                        ignore_keys=True
                    )
                    target_tables[key][module] = target

        return compare_dictionaries(source_tables, target_tables,
                                    self.node_type,
                                    self.blueprint.COLLECTION_LABEL,
                                    self.keys_to_ignore)
|
||||
|
||||
@staticmethod
|
||||
def get_server_type(src_id, tar_id):
|
||||
"""Get server types of source and target servers."""
|
||||
driver = get_driver(PG_DEFAULT_DRIVER)
|
||||
src_manager = driver.connection_manager(src_id)
|
||||
tar_manager = driver.connection_manager(tar_id)
|
||||
|
||||
return src_manager.server_type, tar_manager.server_type
|
||||
|
||||
def ddl_compare(self, **kwargs):
|
||||
"""
|
||||
This function will compare properties of 2 tables and
|
||||
return the source DDL, target DDL and Difference of them.
|
||||
"""
|
||||
|
||||
src_sid = kwargs.get('source_sid')
|
||||
src_did = kwargs.get('source_did')
|
||||
src_scid = kwargs.get('source_scid')
|
||||
src_oid = kwargs.get('source_oid')
|
||||
tar_sid = kwargs.get('target_sid')
|
||||
tar_did = kwargs.get('target_did')
|
||||
tar_scid = kwargs.get('target_scid')
|
||||
tar_oid = kwargs.get('target_oid')
|
||||
comp_status = kwargs.get('comp_status')
|
||||
generate_script = False
|
||||
|
||||
if 'generate_script' in kwargs and kwargs['generate_script']:
|
||||
generate_script = True
|
||||
|
||||
source = ''
|
||||
target = ''
|
||||
diff = ''
|
||||
ignore_sub_modules = ['column', 'constraints']
|
||||
|
||||
src_server_type, tar_server_type = self.get_server_type(src_sid,
|
||||
tar_sid)
|
||||
|
||||
status, target_schema = self.get_schema(tar_sid,
|
||||
tar_did,
|
||||
tar_scid
|
||||
)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=target_schema)
|
||||
|
||||
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
|
||||
if not generate_script:
|
||||
source = self.get_sql_from_table_diff(sid=src_sid,
|
||||
did=src_did,
|
||||
scid=src_scid,
|
||||
tid=src_oid,
|
||||
json_resp=False)
|
||||
diff = self.get_sql_from_table_diff(sid=src_sid, did=src_did,
|
||||
scid=src_scid, tid=src_oid,
|
||||
diff_schema=target_schema,
|
||||
json_resp=False)
|
||||
|
||||
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
|
||||
if not generate_script:
|
||||
target = self.get_sql_from_table_diff(sid=tar_sid,
|
||||
did=tar_did,
|
||||
scid=tar_scid,
|
||||
tid=tar_oid,
|
||||
json_resp=False)
|
||||
SQL = render_template(
|
||||
"/".join([self.table_template_path, 'properties.sql']),
|
||||
did=tar_did, scid=tar_scid, tid=tar_oid,
|
||||
datlastsysoid=self.datlastsysoid
|
||||
)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if status:
|
||||
diff = self.get_delete_sql(res)
|
||||
|
||||
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['different']:
|
||||
source = self.fetch_tables(
|
||||
sid=src_sid, did=src_did,
|
||||
scid=src_scid, tid=src_oid,
|
||||
keys_to_remove=self.keys_to_remove_ddl_comp
|
||||
)
|
||||
target = self.fetch_tables(
|
||||
sid=tar_sid, did=tar_did,
|
||||
scid=tar_scid, tid=tar_oid,
|
||||
keys_to_remove=self.keys_to_remove_ddl_comp
|
||||
)
|
||||
|
||||
if self.manager.version < 100000:
|
||||
ignore_sub_modules.append('partition')
|
||||
|
||||
if self.manager.version < 120000:
|
||||
ignore_sub_modules.append('compound_trigger')
|
||||
|
||||
# In case of error return None
|
||||
if not (source or target):
|
||||
return None
|
||||
|
||||
diff_dict = directory_diff(
|
||||
source, target, ignore_keys=self.keys_to_ignore_ddl_comp,
|
||||
difference={}
|
||||
)
|
||||
|
||||
# Column comparison
|
||||
col_diff = self.table_col_ddl_comp(source, target)
|
||||
diff_dict.update(col_diff)
|
||||
|
||||
# Constraint comparison
|
||||
pk_diff = self.constraint_ddl_comp(source, target)
|
||||
diff_dict.update(pk_diff)
|
||||
|
||||
diff_dict['relacl'] = self.parce_acl(source, target)
|
||||
|
||||
if not generate_script:
|
||||
source = self.get_sql_from_table_diff(sid=src_sid,
|
||||
did=src_did,
|
||||
scid=src_scid,
|
||||
tid=src_oid,
|
||||
json_resp=False)
|
||||
target = self.get_sql_from_table_diff(sid=tar_sid,
|
||||
did=tar_did,
|
||||
scid=tar_scid,
|
||||
tid=tar_oid,
|
||||
json_resp=False)
|
||||
diff = self.get_sql_from_table_diff(sid=tar_sid, did=tar_did,
|
||||
scid=tar_scid, tid=tar_oid,
|
||||
diff_data=diff_dict,
|
||||
json_resp=False)
|
||||
|
||||
for module in self.blueprint.submodules:
|
||||
if module.NODE_TYPE not in ignore_sub_modules:
|
||||
module_view = SchemaDiffRegistry.get_node_view(
|
||||
module.NODE_TYPE)
|
||||
|
||||
if module_view.blueprint.server_type and (
|
||||
src_server_type not in
|
||||
module_view.blueprint.server_type and
|
||||
tar_server_type not in
|
||||
module_view.blueprint.server_type
|
||||
):
|
||||
continue
|
||||
|
||||
if module_view.blueprint.server_type and (
|
||||
(src_server_type in
|
||||
module_view.blueprint.server_type and
|
||||
tar_server_type not in
|
||||
module_view.blueprint.server_type) or (
|
||||
src_server_type not in
|
||||
module_view.blueprint.server_type and
|
||||
tar_server_type in
|
||||
module_view.blueprint.server_type)
|
||||
):
|
||||
continue
|
||||
|
||||
result = module_view.compare(
|
||||
source_sid=src_sid, source_did=src_did,
|
||||
source_scid=src_scid, source_tid=src_oid,
|
||||
target_sid=tar_sid, target_did=tar_did,
|
||||
target_scid=tar_scid, target_tid=tar_oid
|
||||
)
|
||||
if result and module.NODE_TYPE != 'partition':
|
||||
child_diff = ''
|
||||
for res in result:
|
||||
if res['status'] == \
|
||||
SchemaDiffModel.COMPARISON_STATUS[
|
||||
'different']:
|
||||
source_oid = res['source_oid']
|
||||
target_oid = res['target_oid']
|
||||
else:
|
||||
source_oid = res['oid']
|
||||
target_oid = res['oid']
|
||||
|
||||
if res['status'] != \
|
||||
SchemaDiffModel.COMPARISON_STATUS[
|
||||
'identical']:
|
||||
child_diff = module_view.ddl_compare(
|
||||
source_sid=src_sid, source_did=src_did,
|
||||
source_scid=src_scid,
|
||||
source_oid=source_oid,
|
||||
source_tid=src_oid, target_sid=tar_sid,
|
||||
target_did=tar_did, target_scid=tar_scid,
|
||||
target_tid=tar_oid, target_oid=target_oid,
|
||||
comp_status=res['status']
|
||||
|
||||
)
|
||||
if child_diff:
|
||||
diff += child_diff
|
||||
elif result:
|
||||
# For partition module
|
||||
identical = False
|
||||
source_only = False
|
||||
target_only = False
|
||||
different = False
|
||||
for res in result:
|
||||
if res['status'] == \
|
||||
SchemaDiffModel.COMPARISON_STATUS[
|
||||
'identical']:
|
||||
identical = True
|
||||
elif res['status'] == \
|
||||
SchemaDiffModel.COMPARISON_STATUS[
|
||||
'source_only']:
|
||||
source_only = True
|
||||
elif res['status'] == \
|
||||
SchemaDiffModel.COMPARISON_STATUS[
|
||||
'target_only']:
|
||||
target_only = True
|
||||
else:
|
||||
different = True
|
||||
|
||||
if identical:
|
||||
pass
|
||||
elif (source_only or target_only) and not different:
|
||||
for res in result:
|
||||
source_oid = res['oid']
|
||||
target_oid = res['oid']
|
||||
|
||||
child_diff = module_view.ddl_compare(
|
||||
source_sid=src_sid, source_did=src_did,
|
||||
source_scid=src_scid,
|
||||
source_oid=source_oid,
|
||||
source_tid=src_oid, target_sid=tar_sid,
|
||||
target_did=tar_did, target_scid=tar_scid,
|
||||
target_tid=tar_oid, target_oid=target_oid,
|
||||
comp_status=res['status']
|
||||
|
||||
)
|
||||
if ddl_compare:
|
||||
diff += child_diff
|
||||
else:
|
||||
diff = self.get_sql_from_table_diff(
|
||||
sid=src_sid,
|
||||
did=src_did,
|
||||
scid=src_scid,
|
||||
tid=src_oid,
|
||||
diff_schema=target_schema,
|
||||
json_resp=False,
|
||||
schema_diff_table=True
|
||||
)
|
||||
else:
|
||||
source = self.get_sql_from_table_diff(sid=src_sid, did=src_did,
|
||||
scid=src_scid, tid=src_oid,
|
||||
json_resp=False)
|
||||
target = self.get_sql_from_table_diff(sid=tar_sid, did=tar_did,
|
||||
scid=tar_scid, tid=tar_oid,
|
||||
json_resp=False)
|
||||
|
||||
return {'source_ddl': source,
|
||||
'target_ddl': target,
|
||||
'diff_ddl': diff
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def table_col_ddl_comp(source, target):
|
||||
"""
|
||||
Table Column comparison
|
||||
:param source: Source columns
|
||||
:param target: Target columns
|
||||
:return: Difference of the columns
|
||||
"""
|
||||
source_cols = source['columns']
|
||||
target_cols = copy.deepcopy(target['columns'])
|
||||
added = []
|
||||
updated = []
|
||||
different = {'columns': {}}
|
||||
|
||||
for source in source_cols:
|
||||
if 'name' in source:
|
||||
if type(target_cols) is list and len(
|
||||
target_cols) > 0:
|
||||
tmp = None
|
||||
for item in target_cols:
|
||||
if item['name'] == source['name']:
|
||||
tmp = copy.deepcopy(item)
|
||||
if tmp and source != tmp:
|
||||
tmp_updated = copy.deepcopy(source)
|
||||
# Preserve the column number
|
||||
tmp_updated['attnum'] = tmp['attnum']
|
||||
if item['typname'] not in tmp_updated['edit_types']:
|
||||
tmp_updated['col_type_conversion'] = False
|
||||
updated.append(tmp_updated)
|
||||
target_cols.remove(tmp)
|
||||
elif tmp and source == tmp:
|
||||
target_cols.remove(tmp)
|
||||
elif tmp is None:
|
||||
added.append(source)
|
||||
else:
|
||||
added.append(source)
|
||||
different['columns']['added'] = added
|
||||
different['columns']['changed'] = updated
|
||||
|
||||
if target_cols and len(target_cols) > 0:
|
||||
different['columns']['deleted'] = target_cols
|
||||
|
||||
return different
|
||||
|
||||
@staticmethod
|
||||
def constraint_ddl_comp(source_table, target_table):
|
||||
"""
|
||||
Table Constraint DDL comparison
|
||||
:param source: Source Table
|
||||
:param target: Target Table
|
||||
:return: Difference of constraints
|
||||
"""
|
||||
different = {}
|
||||
non_editable_keys = {}
|
||||
|
||||
non_editable_keys = {'primary_key': ['col_count',
|
||||
'condeferrable',
|
||||
'condeffered',
|
||||
'columns'],
|
||||
'check_constraint': ['consrc'],
|
||||
'exclude_constraint': ['amname',
|
||||
'indconstraint',
|
||||
'columns']
|
||||
}
|
||||
|
||||
for constraint in ['primary_key', 'check_constraint',
|
||||
'exclude_constraint']:
|
||||
source_cols = source_table[constraint] if \
|
||||
constraint in source_table else []
|
||||
target_cols = copy.deepcopy(target_table[constraint]) if\
|
||||
constraint in target_table else []
|
||||
added = []
|
||||
updated = []
|
||||
deleted = []
|
||||
|
||||
different[constraint] = {}
|
||||
for source in source_cols:
|
||||
if 'name' in source:
|
||||
if type(target_cols) is list and len(
|
||||
target_cols) > 0:
|
||||
tmp_src = copy.deepcopy(source)
|
||||
tmp_src.pop('oid')
|
||||
tmp_tar = None
|
||||
tmp = None
|
||||
for item in target_cols:
|
||||
if item['name'] == source['name']:
|
||||
tmp_tar = copy.deepcopy(item)
|
||||
tmp = copy.deepcopy(item)
|
||||
tmp_tar.pop('oid')
|
||||
if tmp_tar and tmp_src != tmp_tar:
|
||||
tmp_updated = copy.deepcopy(source)
|
||||
for key in non_editable_keys[constraint]:
|
||||
if key in tmp_updated and \
|
||||
tmp_updated[key] != tmp_tar[key]:
|
||||
added.append(source)
|
||||
deleted.append(tmp_updated)
|
||||
tmp_updated = None
|
||||
break
|
||||
if tmp_updated:
|
||||
tmp_updated['oid'] = tmp_tar['oid']
|
||||
updated.append(tmp_updated)
|
||||
target_cols.remove(tmp)
|
||||
elif tmp_tar and tmp_src == tmp_tar:
|
||||
target_cols.remove(tmp)
|
||||
elif tmp_tar is None:
|
||||
added.append(source)
|
||||
else:
|
||||
added.append(source)
|
||||
different[constraint]['added'] = added
|
||||
different[constraint]['changed'] = updated
|
||||
different[constraint]['deleted'] = deleted
|
||||
|
||||
if target_cols and len(target_cols) > 0:
|
||||
different[constraint]['deleted'] = target_cols
|
||||
|
||||
return different
|
||||
|
||||
def remove_keys_for_comparision(self, data, keys=None):
|
||||
"""
|
||||
This function is used to remove specific keys from data
|
||||
"""
|
||||
|
||||
keys_to_remove = keys if keys else self.keys_to_remove
|
||||
|
||||
for p_key, p_val in keys_to_remove.items():
|
||||
if p_key in data and data[p_key] is not None \
|
||||
and len(data[p_key]) > 0:
|
||||
for item in data[p_key]:
|
||||
# Remove keys that should not be the part of comparision.
|
||||
for key in p_val:
|
||||
if key in item:
|
||||
item.pop(key)
|
||||
@@ -21,7 +21,7 @@ CACHE {{data.seqcache|int}} {% endif %}
|
||||
{% endif %}{% endif %};
|
||||
|
||||
{### Add comments ###}
|
||||
{% if data and data.description %}
|
||||
{% if data and data.description and data.description != None %}
|
||||
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
|
||||
IS {{data.description|qtLiteral}};
|
||||
|
||||
|
||||
@@ -10,8 +10,14 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
{% endif %}
|
||||
{### Alter column type and collation ###}
|
||||
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname)%}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
|
||||
{% if data.col_type_conversion is defined and data.col_type_conversion == False %}
|
||||
-- WARNING:
|
||||
-- The SQL statement below would normally be used to alter the datatype for the {{o_data.name}} column, however,
|
||||
-- the current datatype cannot be cast to the target datatype so this conversion cannot be made automatically.
|
||||
|
||||
{% endif %}
|
||||
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %}ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %} ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
|
||||
COLLATE {{data.collspcname}}{% elif o_data.collspcname %} COLLATE {{o_data.collspcname}}{% endif %};
|
||||
{% endif %}
|
||||
{### Alter column default value ###}
|
||||
@@ -95,7 +101,7 @@ COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, o_data.name)}}
|
||||
|
||||
{% endif %}
|
||||
{### Update column variables ###}
|
||||
{% if 'attoptions' in data and data.attoptions|length > 0 %}
|
||||
{% if 'attoptions' in data and data.attoptions and data.attoptions|length > 0 %}
|
||||
{% set variables = data.attoptions %}
|
||||
{% if 'deleted' in variables and variables.deleted|length > 0 %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
|
||||
@@ -21,7 +21,7 @@ CACHE {{data.seqcache|int}} {% endif %}
|
||||
{% endif %}{% endif %}{% if data.colconstype == 'g' and data.genexpr and data.genexpr != '' %} GENERATED ALWAYS AS ({{data.genexpr}}) STORED{% endif %};
|
||||
|
||||
{### Add comments ###}
|
||||
{% if data and data.description %}
|
||||
{% if data and data.description and data.description != None %}
|
||||
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
|
||||
IS {{data.description|qtLiteral}};
|
||||
|
||||
|
||||
@@ -6,6 +6,12 @@
|
||||
{% if data.name and data.name != o_data.name %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
RENAME {{conn|qtIdent(o_data.name)}} TO {{conn|qtIdent(data.name)}};
|
||||
{% endif %}
|
||||
{% if data.col_type_conversion is defined and data.col_type_conversion == False %}
|
||||
-- WARNING:
|
||||
-- The SQL statement below would normally be used to alter the datatype for the {{o_data.name}} column, however,
|
||||
-- the current datatype cannot be cast to the target datatype so this conversion cannot be made automatically.
|
||||
|
||||
{% endif %}
|
||||
{### Alter column type and collation ###}
|
||||
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname)%}
|
||||
@@ -45,7 +51,7 @@ PLAIN{% elif data.attstorage == 'm'%}MAIN{% elif data.attstorage == 'e'%}
|
||||
EXTERNAL{% elif data.attstorage == 'x'%}EXTENDED{% endif %};
|
||||
|
||||
{% endif %}
|
||||
{% if data.description is defined %}
|
||||
{% if data.description is defined and data.description != None %}
|
||||
{% if data.name %}
|
||||
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
|
||||
{% else %}
|
||||
@@ -55,7 +61,7 @@ COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, o_data.name)}}
|
||||
|
||||
{% endif %}
|
||||
{### Update column variables ###}
|
||||
{% if 'attoptions' in data and data.attoptions|length > 0 %}
|
||||
{% if 'attoptions' in data and data.attoptions != None and data.attoptions|length > 0 %}
|
||||
{% set variables = data.attoptions %}
|
||||
{% if 'deleted' in variables and variables.deleted|length > 0 %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
|
||||
@@ -12,7 +12,7 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
|
||||
{% endif %}
|
||||
{### Add comments ###}
|
||||
{% if data and data.description %}
|
||||
{% if data and data.description and data.description != None %}
|
||||
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
|
||||
IS {{data.description|qtLiteral}};
|
||||
|
||||
|
||||
@@ -9,9 +9,15 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
|
||||
{% endif %}
|
||||
{### Alter column type and collation ###}
|
||||
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname) %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
|
||||
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname)%}
|
||||
{% if data.col_type_conversion is defined and data.col_type_conversion == False %}
|
||||
-- WARNING:
|
||||
-- The SQL statement below would normally be used to alter the datatype for the XXX column, however,
|
||||
-- the current datatype cannot be cast to the target datatype so this conversion cannot be made automatically.
|
||||
|
||||
{% endif %}
|
||||
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %}ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
|
||||
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %} ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
|
||||
COLLATE {{data.collspcname}}{% elif o_data.collspcname %} COLLATE {{o_data.collspcname}}{% endif %};
|
||||
{% endif %}
|
||||
{### Alter column default value ###}
|
||||
@@ -46,7 +52,7 @@ PLAIN{% elif data.attstorage == 'm'%}MAIN{% elif data.attstorage == 'e'%}
|
||||
EXTERNAL{% elif data.attstorage == 'x'%}EXTENDED{% endif %};
|
||||
|
||||
{% endif %}
|
||||
{% if data.description is defined %}
|
||||
{% if data.description is defined and data.description != None %}
|
||||
{% if data.name %}
|
||||
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
|
||||
{% else %}
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
{#====== MAIN TABLE TEMPLATE STARTS HERE ======#}
|
||||
{#===========================================#}
|
||||
{### CREATE TABLE STATEMENT FOR partitions ###}
|
||||
|
||||
CREATE {% if data.relpersistence %}UNLOGGED {% endif %}TABLE {{conn|qtIdent(data.schema, data.name)}}{% if data.relispartition is defined and data.relispartition %} PARTITION OF {{conn|qtIdent(data.parent_schema, data.partitioned_table_name)}}{% endif %}
|
||||
|
||||
{# Macro to render for constraints #}
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
|
||||
|
||||
INSERT INTO {{conn|qtIdent(data.schema, data.name)}}(
|
||||
{% if data.columns and data.columns|length > 0 %}
|
||||
{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %})
|
||||
SELECT {% if data.columns and data.columns|length > 0 %}{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %}
|
||||
FROM {{conn|qtIdent(data.schema, data.orig_name)}};
|
||||
|
||||
DROP TABLE {{conn|qtIdent(data.schema, data.orig_name)}};
|
||||
|
||||
{{partition_sql}}
|
||||
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
|
||||
RENAME TO {{conn|qtIdent(data.orig_name)}};
|
||||
@@ -13,7 +13,7 @@ import simplejson as json
|
||||
from functools import wraps
|
||||
|
||||
import pgadmin.browser.server_groups.servers.databases as database
|
||||
from flask import render_template, request, jsonify
|
||||
from flask import render_template, request, jsonify, current_app
|
||||
from flask_babelex import gettext
|
||||
from pgadmin.browser.collection import CollectionNodeModule
|
||||
from pgadmin.browser.utils import PGChildNodeView
|
||||
@@ -25,7 +25,11 @@ from pgadmin.browser.server_groups.servers.databases.schemas.utils \
|
||||
import trigger_definition
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils.compile_template_name import compile_template_path
|
||||
from pgadmin.utils import IS_PY2
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
unicode = str
|
||||
@@ -151,7 +155,7 @@ class TriggerModule(CollectionNodeModule):
|
||||
blueprint = TriggerModule(__name__)
|
||||
|
||||
|
||||
class TriggerView(PGChildNodeView):
|
||||
class TriggerView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
This class is responsible for generating routes for Trigger node
|
||||
|
||||
@@ -244,6 +248,10 @@ class TriggerView(PGChildNodeView):
|
||||
'enable': [{'put': 'enable_disable_trigger'}]
|
||||
})
|
||||
|
||||
# Schema Diff: Keys to ignore while comparing
|
||||
keys_to_ignore = ['oid', 'xmin', 'nspname', 'tfunction',
|
||||
'tgrelid', 'tgfoid', 'prosrc']
|
||||
|
||||
def check_precondition(f):
|
||||
"""
|
||||
This function will behave as a decorator which will checks
|
||||
@@ -267,6 +275,11 @@ class TriggerView(PGChildNodeView):
|
||||
kwargs['did'] in self.manager.db_info else 0
|
||||
|
||||
# we will set template path for sql scripts
|
||||
self.table_template_path = compile_template_path(
|
||||
'tables/sql',
|
||||
self.manager.server_type,
|
||||
self.manager.version
|
||||
)
|
||||
self.template_path = 'triggers/sql/{0}/#{1}#'.format(
|
||||
self.manager.server_type, self.manager.version)
|
||||
# Store server type
|
||||
@@ -450,7 +463,22 @@ class TriggerView(PGChildNodeView):
|
||||
Returns:
|
||||
JSON of selected trigger node
|
||||
"""
|
||||
status, data = self._fetch_properties(tid, trid)
|
||||
if not status:
|
||||
return data
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, tid, trid):
|
||||
"""
|
||||
This function is used to fetch the properties of the specified object
|
||||
:param tid:
|
||||
:param trid:
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
tid=tid, trid=trid,
|
||||
@@ -459,10 +487,10 @@ class TriggerView(PGChildNodeView):
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
return False, gone(
|
||||
gettext("""Could not find the trigger in the table."""))
|
||||
|
||||
# Making copy of output for future use
|
||||
@@ -472,10 +500,7 @@ class TriggerView(PGChildNodeView):
|
||||
|
||||
data = trigger_definition(data)
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
status=200
|
||||
)
|
||||
return True, data
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did, scid, tid):
|
||||
@@ -552,7 +577,7 @@ class TriggerView(PGChildNodeView):
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, tid, trid=None):
|
||||
def delete(self, gid, sid, did, scid, tid, trid=None, only_sql=False):
|
||||
"""
|
||||
This function will updates existing the trigger object
|
||||
|
||||
@@ -610,6 +635,8 @@ class TriggerView(PGChildNodeView):
|
||||
conn=self.conn,
|
||||
cascade=cascade
|
||||
)
|
||||
if only_sql:
|
||||
return SQL
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@@ -761,6 +788,36 @@ class TriggerView(PGChildNodeView):
|
||||
|
||||
return ajax_response(response=SQL)
|
||||
|
||||
@check_precondition
def get_sql_from_diff(self, gid, sid, did, scid, tid, oid,
                      data=None, diff_schema=None, drop_sql=False):
    """
    Generate the SQL used by the schema-diff tool for a trigger.

    Args:
        gid: Server group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
        oid: Trigger OID
        data: Diff data for a modified trigger (ALTER path) when given
        diff_schema: Target schema to re-point the CREATE script at
        drop_sql: When True (and no data), return the DROP statement

    Returns:
        The SQL string, or an error response produced by the helpers.
    """
    if data:
        # Modified trigger: build the change script from the diff data.
        SQL, name = trigger_utils.get_sql(
            self.conn, data, tid, oid,
            self.datlastsysoid,
            self.blueprint.show_system_objects)

        # get_sql may hand back an error response instead of a string;
        # propagate it untouched.
        if not isinstance(SQL, (str, unicode)):
            return SQL
        return SQL.strip('\n').strip(' ')

    if drop_sql:
        # Target-only trigger: emit its DROP statement.
        return self.delete(gid=gid, sid=sid, did=did,
                           scid=scid, tid=tid, trid=oid,
                           only_sql=True)

    # Source-only trigger: emit the full CREATE script, optionally
    # re-targeted at the schema being compared against.
    target_schema = diff_schema if diff_schema else self.schema
    return trigger_utils.get_reverse_engineered_sql(
        self.conn, target_schema,
        self.table, tid, oid,
        self.datlastsysoid,
        self.blueprint.show_system_objects,
        template_path=None, with_header=False)
|
||||
|
||||
@check_precondition
|
||||
def enable_disable_trigger(self, gid, sid, did, scid, tid, trid):
|
||||
"""
|
||||
@@ -875,5 +932,46 @@ class TriggerView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
                             ignore_keys=False):
    """
    Fetch trigger properties for the schema-diff comparison.

    When *oid* is given, return the properties of that single trigger;
    otherwise return a dict mapping each trigger name of the table to
    its properties.

    :param sid: Server Id
    :param did: Database Id
    :param scid: Schema Id
    :param tid: Table Id
    :param oid: Optional trigger OID to fetch a single trigger
    :param ignore_keys: Strip self.keys_to_ignore from each result row
    :return: dict of properties, or False on a fetch error
    """
    if oid:
        # Single trigger requested.
        status, data = self._fetch_properties(tid, oid)
        if not status:
            current_app.logger.error(data)
            return False
        # NOTE(review): keys_to_ignore is not applied on this path even
        # when ignore_keys is True — presumably intentional; confirm.
        return data

    # All triggers of the table.
    res = dict()
    SQL = render_template("/".join([self.template_path, 'nodes.sql']),
                          tid=tid)
    status, triggers = self.conn.execute_2darray(SQL)
    if not status:
        current_app.logger.error(triggers)
        return False

    for row in triggers['rows']:
        status, data = self._fetch_properties(tid, row['oid'])
        if not status:
            continue
        if ignore_keys:
            # Drop volatile/server-specific keys before comparison.
            for key in self.keys_to_ignore:
                data.pop(key, None)
        res[row['name']] = data

    return res
|
||||
|
||||
|
||||
SchemaDiffRegistry(blueprint.node_type, TriggerView, 'table')
|
||||
TriggerView.register_node_view(blueprint)
|
||||
|
||||
@@ -30,7 +30,7 @@ def get_template_path(f):
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold the connection object
|
||||
conn_obj = args[0]
|
||||
if 'template_path' not in kwargs:
|
||||
if 'template_path' not in kwargs or kwargs['template_path'] is None:
|
||||
kwargs['template_path'] = 'triggers/sql/{0}/#{1}#'.format(
|
||||
conn_obj.manager.server_type, conn_obj.manager.version)
|
||||
|
||||
@@ -201,7 +201,7 @@ def get_sql(conn, data, tid, trid, datlastsysoid,
|
||||
@get_template_path
|
||||
def get_reverse_engineered_sql(conn, schema, table, tid, trid,
|
||||
datlastsysoid, show_system_objects,
|
||||
template_path=None):
|
||||
template_path=None, with_header=True):
|
||||
"""
|
||||
This function will return reverse engineered sql for specified trigger.
|
||||
|
||||
@@ -213,6 +213,8 @@ def get_reverse_engineered_sql(conn, schema, table, tid, trid,
|
||||
:param datlastsysoid:
|
||||
:param show_system_objects: Show System Object value True or False
|
||||
:param template_path: Optional template path
|
||||
:param with_header: Optional parameter to decide whether the SQL will be
|
||||
returned with header or not
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join([template_path, 'properties.sql']),
|
||||
@@ -240,12 +242,15 @@ def get_reverse_engineered_sql(conn, schema, table, tid, trid,
|
||||
SQL, name = get_sql(conn, data, tid, None, datlastsysoid,
|
||||
show_system_objects)
|
||||
|
||||
sql_header = u"-- Trigger: {0}\n\n-- ".format(data['name'])
|
||||
if with_header:
|
||||
sql_header = u"-- Trigger: {0}\n\n-- ".format(data['name'])
|
||||
|
||||
sql_header += render_template("/".join([template_path, 'delete.sql']),
|
||||
data=data, conn=conn)
|
||||
sql_header += render_template("/".join([template_path, 'delete.sql']),
|
||||
data=data, conn=conn)
|
||||
|
||||
SQL = sql_header + '\n\n' + SQL.strip('\n')
|
||||
SQL = sql_header + '\n\n' + SQL.strip('\n')
|
||||
else:
|
||||
SQL = SQL.strip('\n')
|
||||
|
||||
# If trigger is disabled then add sql code for the same
|
||||
if data['is_enable_trigger'] != 'O':
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
""" Implements Utility class for Table and Partitioned Table. """
|
||||
|
||||
import re
|
||||
import copy
|
||||
from functools import wraps
|
||||
import simplejson as json
|
||||
from flask import render_template, jsonify, request
|
||||
@@ -179,8 +180,10 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
data[row['deftype']] = [priv]
|
||||
|
||||
# We will add Auto vacuum defaults with out result for grid
|
||||
data['vacuum_table'] = self.parse_vacuum_data(self.conn, data, 'table')
|
||||
data['vacuum_toast'] = self.parse_vacuum_data(self.conn, data, 'toast')
|
||||
data['vacuum_table'] = copy.deepcopy(
|
||||
self.parse_vacuum_data(self.conn, data, 'table'))
|
||||
data['vacuum_toast'] = copy.deepcopy(
|
||||
self.parse_vacuum_data(self.conn, data, 'toast'))
|
||||
|
||||
# Fetch columns for the table logic
|
||||
#
|
||||
@@ -405,7 +408,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
status=200
|
||||
)
|
||||
|
||||
def get_reverse_engineered_sql(self, did, scid, tid, main_sql, data):
|
||||
def get_reverse_engineered_sql(self, did, scid, tid, main_sql, data,
|
||||
json_resp=True, diff_partition_sql=False):
|
||||
"""
|
||||
This function will creates reverse engineered sql for
|
||||
the table object
|
||||
@@ -416,6 +420,9 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
tid: Table ID
|
||||
main_sql: List contains all the reversed engineered sql
|
||||
data: Table's Data
|
||||
json_resp: Json response or plain SQL
|
||||
diff_partition_sql: In Schema diff, the Partition sql should be
|
||||
return separately to perform further task
|
||||
"""
|
||||
"""
|
||||
#####################################
|
||||
@@ -427,6 +434,7 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
schema = data['schema']
|
||||
table = data['name']
|
||||
is_partitioned = 'is_partitioned' in data and data['is_partitioned']
|
||||
sql_header = ''
|
||||
|
||||
data = self._formatter(did, scid, tid, data)
|
||||
|
||||
@@ -444,18 +452,20 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
c['cltype'], c['hasSqrBracket'] = \
|
||||
column_utils.type_formatter(c['cltype'])
|
||||
|
||||
sql_header = u"-- Table: {0}\n\n-- ".format(
|
||||
self.qtIdent(self.conn, data['schema'], data['name']))
|
||||
if json_resp:
|
||||
sql_header = u"-- Table: {0}\n\n-- ".format(
|
||||
self.qtIdent(self.conn, data['schema'], data['name']))
|
||||
|
||||
sql_header += render_template("/".join([self.table_template_path,
|
||||
'delete.sql']),
|
||||
data=data, conn=self.conn)
|
||||
sql_header += render_template("/".join([self.table_template_path,
|
||||
'delete.sql']),
|
||||
data=data, conn=self.conn)
|
||||
|
||||
sql_header = sql_header.strip('\n')
|
||||
sql_header += '\n'
|
||||
sql_header = sql_header.strip('\n')
|
||||
sql_header += '\n'
|
||||
|
||||
# Add into main sql
|
||||
main_sql.append(sql_header)
|
||||
# Add into main sql
|
||||
main_sql.append(sql_header)
|
||||
partition_main_sql = ""
|
||||
|
||||
# Parse privilege data
|
||||
if 'relacl' in data:
|
||||
@@ -493,12 +503,14 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
for row in rset['rows']:
|
||||
index_sql = index_utils.get_reverse_engineered_sql(
|
||||
self.conn, schema, table, did, tid, row['oid'],
|
||||
self.datlastsysoid)
|
||||
self.datlastsysoid,
|
||||
template_path=None, with_header=json_resp)
|
||||
index_sql = u"\n" + index_sql
|
||||
|
||||
# Add into main sql
|
||||
index_sql = re.sub('\n{2,}', '\n\n', index_sql)
|
||||
main_sql.append(index_sql)
|
||||
|
||||
main_sql.append(index_sql.strip('\n'))
|
||||
|
||||
"""
|
||||
########################################
|
||||
@@ -514,7 +526,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
for row in rset['rows']:
|
||||
trigger_sql = trigger_utils.get_reverse_engineered_sql(
|
||||
self.conn, schema, table, tid, row['oid'],
|
||||
self.datlastsysoid, self.blueprint.show_system_objects)
|
||||
self.datlastsysoid, self.blueprint.show_system_objects,
|
||||
template_path=None, with_header=json_resp)
|
||||
trigger_sql = u"\n" + trigger_sql
|
||||
|
||||
# Add into main sql
|
||||
@@ -571,10 +584,13 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
display_comments = True
|
||||
if not json_resp:
|
||||
display_comments = False
|
||||
res_data = parse_rule_definition(res)
|
||||
rules_sql += render_template("/".join(
|
||||
[self.rules_template_path, 'create.sql']),
|
||||
data=res_data, display_comments=True)
|
||||
data=res_data, display_comments=display_comments)
|
||||
|
||||
# Add into main sql
|
||||
rules_sql = re.sub('\n{2,}', '\n\n', rules_sql)
|
||||
@@ -594,13 +610,17 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
if len(rset['rows']):
|
||||
sql_header = u"\n-- Partitions SQL"
|
||||
if json_resp:
|
||||
sql_header = u"\n-- Partitions SQL"
|
||||
partition_sql = ''
|
||||
for row in rset['rows']:
|
||||
part_data = dict()
|
||||
part_data['partitioned_table_name'] = table
|
||||
part_data['parent_schema'] = schema
|
||||
part_data['schema'] = row['schema_name']
|
||||
part_data['partitioned_table_name'] = data['name']
|
||||
part_data['parent_schema'] = data['schema']
|
||||
if not json_resp:
|
||||
part_data['schema'] = data['schema']
|
||||
else:
|
||||
part_data['schema'] = row['schema_name']
|
||||
part_data['relispartition'] = True
|
||||
part_data['name'] = row['name']
|
||||
part_data['partition_value'] = row['partition_value']
|
||||
@@ -612,13 +632,18 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
data=part_data, conn=self.conn)
|
||||
|
||||
# Add into main sql
|
||||
partition_sql = re.sub('\n{2,}', '\n\n', partition_sql)
|
||||
main_sql.append(
|
||||
sql_header + '\n\n' + partition_sql.strip('\n')
|
||||
)
|
||||
partition_sql = re.sub('\n{2,}', '\n\n', partition_sql
|
||||
).strip('\n')
|
||||
partition_main_sql = partition_sql.strip('\n')
|
||||
if not diff_partition_sql:
|
||||
main_sql.append(
|
||||
sql_header + '\n\n' + partition_main_sql
|
||||
)
|
||||
|
||||
sql = '\n'.join(main_sql)
|
||||
|
||||
if not json_resp:
|
||||
return sql, partition_main_sql
|
||||
return ajax_response(response=sql.strip('\n'))
|
||||
|
||||
def reset_statistics(self, scid, tid):
|
||||
@@ -907,7 +932,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
conn=self.conn).strip('\n') + '\n\n'
|
||||
|
||||
# If partition(s) is/are added
|
||||
if 'added' in partitions:
|
||||
if 'added' in partitions and 'partition_scheme' in old_data\
|
||||
and old_data['partition_scheme'] != '':
|
||||
temp_data = dict()
|
||||
temp_data['schema'] = data['schema']
|
||||
temp_data['name'] = data['name']
|
||||
@@ -1133,7 +1159,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def properties(self, gid, sid, did, scid, tid, res):
|
||||
def properties(self, gid, sid, did, scid, tid, res,
|
||||
return_ajax_response=True):
|
||||
"""
|
||||
This function will show the properties of the selected table node.
|
||||
|
||||
@@ -1145,6 +1172,7 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
scid: Schema ID
|
||||
tid: Table ID
|
||||
res: Table/Partition table properties
|
||||
return_ajax_response: If True then return the ajax response
|
||||
|
||||
Returns:
|
||||
JSON of selected table node
|
||||
@@ -1242,6 +1270,9 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
|
||||
data['partitions'] = partitions
|
||||
|
||||
if not return_ajax_response:
|
||||
return data
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
status=200
|
||||
@@ -1359,6 +1390,22 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
}
|
||||
)
|
||||
|
||||
def get_delete_sql(self, res):
    """
    Render the DROP TABLE statement for the given table properties.

    :param res: Result dict whose first row holds the table's data
    :return: Rendered delete.sql for the table
    """
    # The 'delete' command means drop with CASCADE; any other command
    # produces a plain drop.
    use_cascade = (self.cmd == 'delete')

    return render_template(
        "/".join([self.table_template_path, 'delete.sql']),
        data=res['rows'][0], cascade=use_cascade,
        conn=self.conn
    )
|
||||
|
||||
def delete(self, gid, sid, did, scid, tid, res):
|
||||
"""
|
||||
This function will delete the table object
|
||||
@@ -1371,20 +1418,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
|
||||
tid: Table ID
|
||||
"""
|
||||
|
||||
# Below will decide if it's simple drop or drop with cascade call
|
||||
if self.cmd == 'delete':
|
||||
# This is a cascade operation
|
||||
cascade = True
|
||||
else:
|
||||
cascade = False
|
||||
SQL = self.get_delete_sql(res)
|
||||
|
||||
data = res['rows'][0]
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.table_template_path, 'delete.sql']),
|
||||
data=data, cascade=cascade,
|
||||
conn=self.conn
|
||||
)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return status, res
|
||||
|
||||
@@ -26,6 +26,8 @@ from pgadmin.utils import IS_PY2
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
@@ -94,7 +96,7 @@ class TypeModule(SchemaChildModule):
|
||||
blueprint = TypeModule(__name__)
|
||||
|
||||
|
||||
class TypeView(PGChildNodeView, DataTypeReader):
|
||||
class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
|
||||
"""
|
||||
This class is responsible for generating routes for Type node
|
||||
|
||||
@@ -173,6 +175,10 @@ class TypeView(PGChildNodeView, DataTypeReader):
|
||||
* get_external_functions_list(gid, sid, did, scid, tid):
|
||||
- This function will return list of external functions
|
||||
in ajax response
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the type nodes from two
|
||||
different schemas.
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -559,6 +565,22 @@ class TypeView(PGChildNodeView, DataTypeReader):
|
||||
Returns:
|
||||
JSON of selected type node
|
||||
"""
|
||||
status, res = self._fetch_properties(scid, tid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, tid):
|
||||
"""
|
||||
This function is used to fecth the properties of the specified object.
|
||||
:param scid:
|
||||
:param tid:
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path,
|
||||
@@ -569,10 +591,10 @@ class TypeView(PGChildNodeView, DataTypeReader):
|
||||
)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
return False, gone(
|
||||
gettext("""Could not find the type in the database."""))
|
||||
|
||||
# Making copy of output for future use
|
||||
@@ -583,7 +605,7 @@ class TypeView(PGChildNodeView, DataTypeReader):
|
||||
scid=scid, tid=tid)
|
||||
status, acl = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=acl)
|
||||
return False, internal_server_error(errormsg=acl)
|
||||
|
||||
# We will set get privileges from acl sql so we don't need
|
||||
# it from properties sql
|
||||
@@ -599,10 +621,7 @@ class TypeView(PGChildNodeView, DataTypeReader):
|
||||
# Calling function to check and additional properties if available
|
||||
copy_dict.update(self.additional_properties(copy_dict, tid))
|
||||
|
||||
return ajax_response(
|
||||
response=copy_dict,
|
||||
status=200
|
||||
)
|
||||
return True, copy_dict
|
||||
|
||||
@check_precondition
|
||||
def get_collations(self, gid, sid, did, scid, tid=None):
|
||||
@@ -1428,5 +1447,31 @@ class TypeView(PGChildNodeView, DataTypeReader):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
    """
    This function will fetch the list of all the types for
    specified schema id, keyed by type name, for the schema-diff tool.

    :param sid: Server Id
    :param did: Database Id
    :param scid: Schema Id
    :return: dict mapping type name -> properties, or an error response
    """
    res = dict()
    SQL = render_template("/".join([self.template_path,
                                    'nodes.sql']),
                          scid=scid, datlastsysoid=self.datlastsysoid)
    status, rset = self.conn.execute_2darray(SQL)
    if not status:
        # Bug fix: report the actual DB error (rset), not the empty
        # accumulator dict (res).
        return internal_server_error(errormsg=rset)

    for row in rset['rows']:
        status, data = self._fetch_properties(scid, row['oid'])
        # Skip rows whose detailed properties could not be fetched.
        if status:
            res[row['name']] = data

    return res
|
||||
|
||||
|
||||
TypeView.register_node_view(blueprint)
|
||||
|
||||
@@ -15,6 +15,8 @@ from flask import render_template
|
||||
|
||||
from pgadmin.browser.collection import CollectionNodeModule
|
||||
from pgadmin.utils.ajax import internal_server_error
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
|
||||
class SchemaChildModule(CollectionNodeModule):
|
||||
@@ -485,11 +487,50 @@ class VacuumSettings:
|
||||
* type - table/toast vacuum type
|
||||
|
||||
"""
|
||||
vacuum_settings = dict()
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def get_vacuum_table_settings(self, conn):
|
||||
def fetch_default_vacuum_settings(self, conn, sid, setting_type):
    """
    Fetch — and cache per server id — the default vacuum settings.

    :param conn: Connection Object
    :param sid: Server ID
    :param setting_type: Type ('table' or 'toast')
    :return: List of setting rows (name, label, column_type, setting)
    """
    # Serve from the class-level cache when this server/type pair has
    # already been fetched.
    server_cache = VacuumSettings.vacuum_settings.setdefault(sid, dict())
    if setting_type in server_cache:
        return server_cache[setting_type]

    # Field metadata: maps a pg setting name to (name, label, column_type).
    vacuum_fields = json.loads(
        render_template("vacuum_settings/vacuum_fields.json"))

    # Quote the setting names for the defaults query.
    field_keys = "'" + "','".join(
        vacuum_fields[setting_type].keys()) + "'"
    SQL = render_template('vacuum_settings/sql/vacuum_defaults.sql',
                          columns=field_keys)

    status, res = conn.execute_dict(SQL)
    if not status:
        return internal_server_error(errormsg=res)

    # Replace raw setting names with display name/label/type metadata.
    for row in res['rows']:
        meta = vacuum_fields[setting_type][row['name']]
        row['name'] = meta[0]
        row['label'] = meta[1]
        row['column_type'] = meta[2]

    server_cache[setting_type] = res['rows']
    return server_cache[setting_type]
|
||||
|
||||
def get_vacuum_table_settings(self, conn, sid):
|
||||
"""
|
||||
Fetch the default values for autovacuum
|
||||
fields, return an array of
|
||||
@@ -498,31 +539,9 @@ class VacuumSettings:
|
||||
- setting
|
||||
values
|
||||
"""
|
||||
return self.fetch_default_vacuum_settings(conn, sid, 'table')
|
||||
|
||||
# returns an array of name & label values
|
||||
vacuum_fields = render_template("vacuum_settings/vacuum_fields.json")
|
||||
|
||||
vacuum_fields = json.loads(vacuum_fields)
|
||||
|
||||
# returns an array of setting & name values
|
||||
vacuum_fields_keys = "'" + "','".join(
|
||||
vacuum_fields['table'].keys()) + "'"
|
||||
SQL = render_template('vacuum_settings/sql/vacuum_defaults.sql',
|
||||
columns=vacuum_fields_keys)
|
||||
status, res = conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in res['rows']:
|
||||
row_name = row['name']
|
||||
row['name'] = vacuum_fields['table'][row_name][0]
|
||||
row['label'] = vacuum_fields['table'][row_name][1]
|
||||
row['column_type'] = vacuum_fields['table'][row_name][2]
|
||||
|
||||
return res
|
||||
|
||||
def get_vacuum_toast_settings(self, conn):
|
||||
def get_vacuum_toast_settings(self, conn, sid):
|
||||
"""
|
||||
Fetch the default values for autovacuum
|
||||
fields, return an array of
|
||||
@@ -531,29 +550,7 @@ class VacuumSettings:
|
||||
- setting
|
||||
values
|
||||
"""
|
||||
|
||||
# returns an array of name & label values
|
||||
vacuum_fields = render_template("vacuum_settings/vacuum_fields.json")
|
||||
|
||||
vacuum_fields = json.loads(vacuum_fields)
|
||||
|
||||
# returns an array of setting & name values
|
||||
vacuum_fields_keys = "'" + "','".join(
|
||||
vacuum_fields['toast'].keys()) + "'"
|
||||
SQL = render_template('vacuum_settings/sql/vacuum_defaults.sql',
|
||||
columns=vacuum_fields_keys)
|
||||
status, res = conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in res['rows']:
|
||||
row_name = row['name']
|
||||
row['name'] = vacuum_fields['toast'][row_name][0]
|
||||
row['label'] = vacuum_fields['toast'][row_name][1]
|
||||
row['column_type'] = vacuum_fields['table'][row_name][2]
|
||||
|
||||
return res
|
||||
return self.fetch_default_vacuum_settings(conn, sid, 'toast')
|
||||
|
||||
def parse_vacuum_data(self, conn, result, type):
    """
    Merge the current vacuum option values from *result* into the
    server's default vacuum settings.

    :param conn: Connection Object
    :param result: dict of current relation options for the object
    :param type: table/toast vacuum type (note: shadows the builtin
        'type'; kept for interface compatibility)
    :return: The default-settings rows, each carrying a 'value' key when
        *result* provides a value for it (and with any stale 'value'
        removed otherwise)
    """
    vacuum_settings_tmp = self.fetch_default_vacuum_settings(
        conn, self.manager.sid, type)

    for row in vacuum_settings_tmp:
        row_name = row['name']
        # Bug fix: compare strings with '==', not identity ('is') —
        # the latter only works by accident of CPython interning.
        if type == 'toast':
            # Toast settings are stored with a 'toast_' prefix.
            row_name = 'toast_{0}'.format(row['name'])

        if row_name in result and result[row_name] is not None:
            if row['column_type'] == 'number':
                row['value'] = float(result[row_name])
            else:
                row['value'] = int(result[row_name])
        else:
            # No current value: drop any stale cached one.
            row.pop('value', None)

    return vacuum_settings_tmp
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if type is 'table':
|
||||
for row in res['rows']:
|
||||
row_name = row['name']
|
||||
row['name'] = vacuum_fields[type][row_name][0]
|
||||
row['label'] = vacuum_fields[type][row_name][1]
|
||||
row['column_type'] = vacuum_fields[type][row_name][2]
|
||||
if result[row['name']] is not None:
|
||||
if row['column_type'] == 'number':
|
||||
value = float(result[row['name']])
|
||||
else:
|
||||
value = int(result[row['name']])
|
||||
row['value'] = row['setting'] = value
|
||||
def get_schema(sid, did, scid):
    """
    Return the name of the schema identified by *scid*.

    Args:
        sid: Server id.
        did: Database id.
        scid: Schema id.

    Returns:
        Tuple (status, schema_name) as produced by execute_scalar;
        on failure the second element carries the error message.
    """
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(sid)
    conn = manager.connection(did=did)

    # The template directory is selected per server type and version.
    server_type = manager.server_type
    ver = manager.version

    # Fetch schema name
    status, schema_name = conn.execute_scalar(
        render_template(
            "/".join(['schemas',
                      '{0}/#{1}#'.format(server_type, ver),
                      'sql/get_name.sql']),
            conn=conn, scid=scid
        )
    )

    return status, schema_name
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
|
||||
"""Implements View and Materialized View Node"""
|
||||
|
||||
import copy
|
||||
from functools import wraps
|
||||
|
||||
import simplejson as json
|
||||
@@ -18,13 +19,16 @@ from flask_babelex import gettext
|
||||
import pgadmin.browser.server_groups.servers.databases as databases
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.utils import \
|
||||
SchemaChildModule, parse_rule_definition, VacuumSettings
|
||||
SchemaChildModule, parse_rule_definition, VacuumSettings, get_schema
|
||||
from pgadmin.browser.server_groups.servers.utils import parse_priv_from_db, \
|
||||
parse_priv_to_db
|
||||
from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
|
||||
"""
|
||||
This module is responsible for generating two nodes
|
||||
@@ -197,7 +201,7 @@ def check_precondition(f):
|
||||
return wrap
|
||||
|
||||
|
||||
class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
|
||||
"""
|
||||
This class is responsible for generating routes for view node.
|
||||
|
||||
@@ -250,6 +254,10 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
* dependent(gid, sid, did, scid):
|
||||
- This function will generate dependent list to show it in dependent
|
||||
pane for the selected view node.
|
||||
|
||||
* compare(**kwargs):
|
||||
- This function will compare the view nodes from two
|
||||
different schemas.
|
||||
"""
|
||||
node_type = view_blueprint.node_type
|
||||
|
||||
@@ -290,6 +298,8 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
{'get': 'get_toast_table_vacuum'}]
|
||||
})
|
||||
|
||||
keys_to_ignore = ['oid', 'schema', 'xmin']
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""
|
||||
Initialize the variables used by methods of ViewNode.
|
||||
@@ -400,21 +410,37 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
Fetches the properties of an individual view
|
||||
and render in the properties tab
|
||||
"""
|
||||
status, res = self._fetch_properties(scid, vid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, scid, vid):
|
||||
"""
|
||||
This function is used to fetch the properties of the specified object
|
||||
:param scid:
|
||||
:param vid:
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'sql/properties.sql']
|
||||
), vid=vid, datlastsysoid=self.datlastsysoid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the view."""))
|
||||
return False, gone(gettext("""Could not find the view."""))
|
||||
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'sql/acl.sql']), vid=vid)
|
||||
status, dataclres = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
for row in dataclres['rows']:
|
||||
priv = parse_priv_from_db(row)
|
||||
@@ -428,10 +454,7 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
# merging formatted result with main result again
|
||||
result.update(frmtd_reslt)
|
||||
|
||||
return ajax_response(
|
||||
response=result,
|
||||
status=200
|
||||
)
|
||||
return True, result
|
||||
|
||||
@staticmethod
|
||||
def formatter(result):
|
||||
@@ -556,7 +579,7 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, vid=None):
|
||||
def delete(self, gid, sid, did, scid, vid=None, only_sql=False):
|
||||
"""
|
||||
This function will drop a view object
|
||||
"""
|
||||
@@ -604,6 +627,10 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
nspname=res_data['rows'][0]['schema'],
|
||||
name=res_data['rows'][0]['name'], cascade=cascade
|
||||
)
|
||||
|
||||
if only_sql:
|
||||
return SQL
|
||||
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@@ -840,7 +867,7 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
|
||||
return columns
|
||||
|
||||
def get_rule_sql(self, vid):
|
||||
def get_rule_sql(self, vid, display_comments=True):
|
||||
"""
|
||||
Get all non system rules of view node,
|
||||
generate their sql and render
|
||||
@@ -869,12 +896,12 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
res = parse_rule_definition(res)
|
||||
SQL = render_template("/".join(
|
||||
[self.rule_temp_path, 'sql/create.sql']),
|
||||
data=res, display_comments=True)
|
||||
data=res, display_comments=display_comments)
|
||||
SQL_data += '\n'
|
||||
SQL_data += SQL
|
||||
return SQL_data
|
||||
|
||||
def get_compound_trigger_sql(self, vid):
|
||||
def get_compound_trigger_sql(self, vid, display_comments=True):
|
||||
"""
|
||||
Get all compound trigger nodes associated with view node,
|
||||
generate their sql and render into sql tab
|
||||
@@ -945,13 +972,13 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
[self.ct_trigger_temp_path,
|
||||
'sql/{0}/#{1}#/create.sql'.format(
|
||||
self.manager.server_type, self.manager.version)]),
|
||||
data=res_rows, display_comments=True)
|
||||
data=res_rows, display_comments=display_comments)
|
||||
SQL_data += '\n'
|
||||
SQL_data += SQL
|
||||
|
||||
return SQL_data
|
||||
|
||||
def get_trigger_sql(self, vid):
|
||||
def get_trigger_sql(self, vid, display_comments=True):
|
||||
"""
|
||||
Get all trigger nodes associated with view node,
|
||||
generate their sql and render
|
||||
@@ -1038,13 +1065,13 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
[self.trigger_temp_path,
|
||||
'sql/{0}/#{1}#/create.sql'.format(
|
||||
self.manager.server_type, self.manager.version)]),
|
||||
data=res_rows, display_comments=True)
|
||||
data=res_rows, display_comments=display_comments)
|
||||
SQL_data += '\n'
|
||||
SQL_data += SQL
|
||||
|
||||
return SQL_data
|
||||
|
||||
def get_index_sql(self, did, vid):
|
||||
def get_index_sql(self, did, vid, display_comments=True):
|
||||
"""
|
||||
Get all index associated with view node,
|
||||
generate their sql and render
|
||||
@@ -1084,17 +1111,23 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
SQL = render_template("/".join(
|
||||
[self.index_temp_path,
|
||||
'sql/#{0}#/create.sql'.format(self.manager.version)]),
|
||||
data=data, display_comments=True)
|
||||
data=data, display_comments=display_comments)
|
||||
SQL_data += '\n'
|
||||
SQL_data += SQL
|
||||
return SQL_data
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, vid):
|
||||
def sql(self, gid, sid, did, scid, vid, diff_schema=None,
|
||||
json_resp=True):
|
||||
"""
|
||||
This function will generate sql to render into the sql panel
|
||||
"""
|
||||
|
||||
display_comments = True
|
||||
|
||||
if not json_resp:
|
||||
display_comments = False
|
||||
|
||||
SQL_data = ''
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'sql/properties.sql']),
|
||||
@@ -1111,6 +1144,9 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
)
|
||||
|
||||
result = res['rows'][0]
|
||||
if diff_schema:
|
||||
result['schema'] = diff_schema
|
||||
|
||||
# sending result to formatter
|
||||
frmtd_reslt = self.formatter(result)
|
||||
|
||||
@@ -1152,18 +1188,20 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
[self.template_path, 'sql/create.sql']),
|
||||
data=result,
|
||||
conn=self.conn,
|
||||
display_comments=True
|
||||
display_comments=display_comments
|
||||
)
|
||||
SQL += "\n"
|
||||
SQL += render_template("/".join(
|
||||
[self.template_path, 'sql/grant.sql']), data=result)
|
||||
|
||||
SQL_data += SQL
|
||||
SQL_data += self.get_rule_sql(vid)
|
||||
SQL_data += self.get_trigger_sql(vid)
|
||||
SQL_data += self.get_compound_trigger_sql(vid)
|
||||
SQL_data += self.get_index_sql(did, vid)
|
||||
SQL_data += self.get_rule_sql(vid, display_comments)
|
||||
SQL_data += self.get_trigger_sql(vid, display_comments)
|
||||
SQL_data += self.get_compound_trigger_sql(vid, display_comments)
|
||||
SQL_data += self.get_index_sql(did, vid, display_comments)
|
||||
|
||||
if not json_resp:
|
||||
return SQL_data
|
||||
return ajax_response(response=SQL_data)
|
||||
|
||||
@check_precondition
|
||||
@@ -1357,6 +1395,60 @@ class ViewNode(PGChildNodeView, VacuumSettings):
|
||||
|
||||
return ajax_response(response=sql)
|
||||
|
||||
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, oid=None):
    """
    Fetch view(s) of the given schema for schema-diff comparison.

    When *oid* is omitted, returns a dict mapping each view name in
    the schema to its properties; otherwise returns the properties
    of that single view. Returns False (after logging) on a failed
    query.

    :param sid: Server Id
    :param did: Database Id
    :param scid: Schema Id
    :param oid: View Id (optional)
    :return:
    """
    if oid:
        status, data = self._fetch_properties(scid, oid)
        if not status:
            current_app.logger.error(data)
            return False
        return data

    res = {}
    SQL = render_template(
        "/".join([self.template_path, 'sql/nodes.sql']),
        did=did, scid=scid, datlastsysoid=self.datlastsysoid)
    status, views = self.conn.execute_2darray(SQL)
    if not status:
        current_app.logger.error(views)
        return False

    for row in views['rows']:
        status, data = self._fetch_properties(scid, row['oid'])
        if status:
            res[row['name']] = data

    return res
|
||||
|
||||
def get_sql_from_diff(self, gid, sid, did, scid, oid, data=None,
                      diff_schema=None, drop_sql=False):
    """
    Generate the SQL script used by the schema-diff tool for a view.

    With *data*, produces a modification script via getSQL; with
    *drop_sql*, a DROP script; otherwise the full CREATE script,
    optionally re-targeted at *diff_schema*.
    """
    if data:
        if diff_schema:
            data['schema'] = diff_schema
        sql, _name = self.getSQL(gid, sid, did, data, oid)
        return sql

    if drop_sql:
        return self.delete(gid=gid, sid=sid, did=did,
                           scid=scid, vid=oid, only_sql=True)

    kwargs = dict(gid=gid, sid=sid, did=did, scid=scid, vid=oid,
                  json_resp=False)
    if diff_schema:
        kwargs['diff_schema'] = diff_schema
    return self.sql(**kwargs)
|
||||
|
||||
|
||||
# Override the operations for materialized view
|
||||
mview_operations = {
|
||||
@@ -1631,11 +1723,17 @@ class MViewNode(ViewNode, VacuumSettings):
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, vid):
|
||||
def sql(self, gid, sid, did, scid, vid, diff_schema=None,
|
||||
json_resp=True):
|
||||
"""
|
||||
This function will generate sql to render into the sql panel
|
||||
"""
|
||||
|
||||
display_comments = True
|
||||
|
||||
if not json_resp:
|
||||
display_comments = False
|
||||
|
||||
SQL_data = ''
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'sql/properties.sql']),
|
||||
@@ -1654,6 +1752,9 @@ class MViewNode(ViewNode, VacuumSettings):
|
||||
|
||||
result = res['rows'][0]
|
||||
|
||||
if diff_schema:
|
||||
result['schema'] = diff_schema
|
||||
|
||||
# sending result to formatter
|
||||
frmtd_reslt = self.formatter(result)
|
||||
|
||||
@@ -1732,17 +1833,20 @@ class MViewNode(ViewNode, VacuumSettings):
|
||||
[self.template_path, 'sql/create.sql']),
|
||||
data=result,
|
||||
conn=self.conn,
|
||||
display_comments=True
|
||||
display_comments=display_comments
|
||||
)
|
||||
SQL += "\n"
|
||||
SQL += render_template("/".join(
|
||||
[self.template_path, 'sql/grant.sql']), data=result)
|
||||
|
||||
SQL_data += SQL
|
||||
SQL_data += self.get_rule_sql(vid)
|
||||
SQL_data += self.get_trigger_sql(vid)
|
||||
SQL_data += self.get_index_sql(did, vid)
|
||||
SQL_data += self.get_rule_sql(vid, display_comments)
|
||||
SQL_data += self.get_trigger_sql(vid, display_comments)
|
||||
SQL_data += self.get_index_sql(did, vid, display_comments)
|
||||
SQL_data = SQL_data.strip('\n')
|
||||
|
||||
if not json_resp:
|
||||
return SQL_data
|
||||
return ajax_response(response=SQL_data)
|
||||
|
||||
@check_precondition
|
||||
@@ -1756,9 +1860,9 @@ class MViewNode(ViewNode, VacuumSettings):
|
||||
values
|
||||
"""
|
||||
|
||||
res = self.get_vacuum_table_settings(self.conn)
|
||||
res = self.get_vacuum_table_settings(self.conn, sid)
|
||||
return ajax_response(
|
||||
response=res['rows'],
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
@@ -1772,10 +1876,10 @@ class MViewNode(ViewNode, VacuumSettings):
|
||||
- setting
|
||||
values
|
||||
"""
|
||||
res = self.get_vacuum_toast_settings(self.conn)
|
||||
res = self.get_vacuum_toast_settings(self.conn, sid)
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'],
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
@@ -1785,21 +1889,39 @@ class MViewNode(ViewNode, VacuumSettings):
|
||||
Fetches the properties of an individual view
|
||||
and render in the properties tab
|
||||
"""
|
||||
status, res = self._fetch_properties(did, scid, vid)
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, did, scid, vid):
|
||||
"""
|
||||
This function is used to fetch the properties of the specified object
|
||||
:param did:
|
||||
:param scid:
|
||||
:param vid:
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'sql/properties.sql']
|
||||
), did=did, vid=vid, datlastsysoid=self.datlastsysoid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the materialized view."""))
|
||||
return False, gone(
|
||||
gettext("""Could not find the materialized view."""))
|
||||
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'sql/acl.sql']), vid=vid)
|
||||
status, dataclres = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
for row in dataclres['rows']:
|
||||
priv = parse_priv_from_db(row)
|
||||
@@ -1818,10 +1940,7 @@ class MViewNode(ViewNode, VacuumSettings):
|
||||
result['vacuum_toast'] = self.parse_vacuum_data(
|
||||
self.conn, result, 'toast')
|
||||
|
||||
return ajax_response(
|
||||
response=result,
|
||||
status=200
|
||||
)
|
||||
return True, result
|
||||
|
||||
@check_precondition
|
||||
def refresh_data(self, gid, sid, did, scid, vid):
|
||||
@@ -1873,6 +1992,34 @@ class MViewNode(ViewNode, VacuumSettings):
|
||||
current_app.logger.exception(e)
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, oid=None):
    """
    Fetch all materialized views of the given schema for the
    schema-diff comparison.

    :param sid: Server Id
    :param did: Database Id
    :param scid: Schema Id
    :param oid: unused here; kept for interface compatibility
    :return: dict mapping mview name -> properties dict, or an
             internal_server_error response on query failure
    """
    res = dict()
    SQL = render_template("/".join([self.template_path,
                                    'sql/nodes.sql']), did=did,
                          scid=scid, datlastsysoid=self.datlastsysoid)
    status, rset = self.conn.execute_2darray(SQL)
    if not status:
        # Bug fix: report the actual error payload (rset); 'res' is
        # the still-empty result dict at this point.
        return internal_server_error(errormsg=rset)

    for row in rset['rows']:
        status, data = self._fetch_properties(did, scid, row['oid'])
        if status:
            res[row['name']] = data

    return res
|
||||
|
||||
|
||||
SchemaDiffRegistry(view_blueprint.node_type, ViewNode)
|
||||
ViewNode.register_node_view(view_blueprint)
|
||||
SchemaDiffRegistry(mview_blueprint.node_type, MViewNode)
|
||||
MViewNode.register_node_view(mview_blueprint)
|
||||
|
||||
@@ -19,7 +19,7 @@ ALTER TABLE {{ conn|qtIdent(view_schema, view_name) }}
|
||||
{% endif %}
|
||||
{% if def and def != o_data.definition.rstrip(';') %}
|
||||
CREATE OR REPLACE VIEW {{ conn|qtIdent(view_schema, view_name) }}
|
||||
WITH (security_barrier={{ data.security_barrier|lower if data.security_barrier else o_data.security_barrier|default('false', 'true')|lower }})
|
||||
WITH (security_barrier={{ data.security_barrier|lower if data.security_barrier is defined else o_data.security_barrier|default('false', 'true')|lower }})
|
||||
AS
|
||||
{{ def }};
|
||||
{% else %}
|
||||
|
||||
@@ -19,7 +19,7 @@ ALTER TABLE {{ conn|qtIdent(view_schema, view_name) }}
|
||||
{% endif %}
|
||||
{% if def and def != o_data.definition.rstrip(';') %}
|
||||
CREATE OR REPLACE VIEW {{ conn|qtIdent(view_schema, view_name) }}
|
||||
WITH (security_barrier={{ data.security_barrier|lower if data.security_barrier else o_data.security_barrier|default('false', 'true')|lower }})
|
||||
WITH (security_barrier={{ data.security_barrier|lower if data.security_barrier is defined else o_data.security_barrier|default('false', 'true')|lower }})
|
||||
AS
|
||||
{{ def }};
|
||||
{% else %}
|
||||
|
||||
@@ -19,7 +19,7 @@ ALTER TABLE {{ conn|qtIdent(view_schema, view_name) }}
|
||||
{% endif %}
|
||||
{% if def and def != o_data.definition.rstrip(';') %}
|
||||
CREATE OR REPLACE VIEW {{ conn|qtIdent(view_schema, view_name) }}
|
||||
WITH ({% if (data.check_option or o_data.check_option) %}check_option={{ data.check_option if data.check_option else o_data.check_option }}{{', ' }}{% endif %}security_barrier={{ data.security_barrier|lower if data.security_barrier else o_data.security_barrier|default('false', 'true')|lower }})
|
||||
WITH ({% if (data.check_option or o_data.check_option) %}check_option={{ data.check_option if data.check_option else o_data.check_option }}{{', ' }}{% endif %}security_barrier={{ data.security_barrier|lower if data.security_barrier is defined else o_data.security_barrier|default('false', 'true')|lower }})
|
||||
AS
|
||||
{{ def }};
|
||||
{% else %}
|
||||
|
||||
@@ -11,4 +11,9 @@
|
||||
background-image: url('{{ url_for('NODE-database.static', filename='img/databasebad.svg') }}') !important;
|
||||
border-radius: 10px;
|
||||
background-size: 20px !important;
|
||||
background-repeat: no-repeat;
|
||||
vertical-align: middle;
|
||||
align-content: center;
|
||||
height: 1.3em;
|
||||
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user