Added Schema Diff tool to compare two schemas and generate the difference script. 

Currently supported objects are Table, View, Materialized View, Function and Procedure.

Backend comparison of two schemas implemented by: Akshay Joshi

Fixes #3452.
Khushboo Vashi 2020-01-10 15:39:32 +05:30 committed by Akshay Joshi
parent 8b99a33e6e
commit 45f2e35a99
87 changed files with 10727 additions and 402 deletions
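The file-by-file changes below repeat one pattern: each object view mixes in SchemaDiffObjectCompare (or is registered directly through SchemaDiffRegistry) and gains a fetch_objects_to_compare(sid, did, scid) method that returns a dict of object properties keyed by name, which the Schema Diff tool then compares between the two schemas. Below is a minimal, self-contained sketch of that contract; the classes are simplified stand-ins, not the actual pgAdmin implementations.

# Illustrative sketch only: simplified stand-ins for the registry/view contract
# introduced by this commit. The real pgAdmin views run nodes.sql/properties.sql here.
class SchemaDiffRegistry:
    _registered_nodes = {}

    def __init__(self, node_type, view_class):
        # Map a node type (e.g. 'collation', 'table') to the view class that
        # can fetch its objects for comparison.
        SchemaDiffRegistry._registered_nodes[node_type] = view_class

    @classmethod
    def get(cls, node_type):
        return cls._registered_nodes.get(node_type)


class DummyCollationView:
    def fetch_objects_to_compare(self, sid, did, scid):
        # Canned data; the real view queries the database.
        return {'my_collation': {'name': 'my_collation', 'owner': 'postgres'}}


SchemaDiffRegistry('collation', DummyCollationView)
view = SchemaDiffRegistry.get('collation')()
source = view.fetch_objects_to_compare(sid=1, did=1, scid=2200)
target = view.fetch_objects_to_compare(sid=2, did=1, scid=2200)
print(set(source) - set(target))  # names present only in the source schema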

View File

@ -9,6 +9,7 @@ This release contains a number of bug fixes and new features since the release o
New features
************
| `Issue #3452 <https://redmine.postgresql.org/issues/3452>`_ - Added Schema Diff tool to compare two schemas and generate the difference script.
Housekeeping
************

View File

@ -28,6 +28,7 @@ from pgadmin.model import db, Server, ServerGroup, User
from pgadmin.utils.driver import get_driver
from pgadmin.utils.master_password import get_crypt_key
from pgadmin.utils.exception import CryptKeyMissing
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from psycopg2 import Error as psycopg2_Error, OperationalError
@ -1627,4 +1628,5 @@ class ServerNode(PGChildNodeView):
)
SchemaDiffRegistry(blueprint.node_type, ServerNode)
ServerNode.register_node_view(blueprint)

View File

@ -30,6 +30,8 @@ from pgadmin.utils.ajax import make_json_response, \
make_response as ajax_response, internal_server_error, unauthorized
from pgadmin.utils.driver import get_driver
from pgadmin.tools.sqleditor.utils.query_history import QueryHistory
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.model import Server
@ -1111,4 +1113,5 @@ class DatabaseView(PGChildNodeView):
)
SchemaDiffRegistry(blueprint.node_type, DatabaseView)
DatabaseView.register_node_view(blueprint)

View File

@ -23,6 +23,7 @@ from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone, bad_request
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
"""
This module is responsible for generating two nodes
@ -1023,5 +1024,6 @@ It may have been removed by another user.
return ajax_response(response=SQL.strip("\n"))
SchemaDiffRegistry(schema_blueprint.node_type, SchemaView)
SchemaView.register_node_view(schema_blueprint)
CatalogView.register_node_view(catalog_blueprint)

View File

@ -25,6 +25,8 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -92,7 +94,7 @@ class CollationModule(SchemaChildModule):
blueprint = CollationModule(__name__)
class CollationView(PGChildNodeView):
class CollationView(PGChildNodeView, SchemaDiffObjectCompare):
"""
This class is responsible for generating routes for Collation node
@ -144,6 +146,10 @@ class CollationView(PGChildNodeView):
* dependent(gid, sid, did, scid):
- This function will generate dependent list to show it in dependent
pane for the selected Collation node.
* compare(**kwargs):
- This function will compare the collation nodes from two different
schemas.
"""
node_type = blueprint.node_type
@ -172,7 +178,8 @@ class CollationView(PGChildNodeView):
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'get_collations': [{'get': 'get_collation'},
{'get': 'get_collation'}]
{'get': 'get_collation'}],
'compare': [{'get': 'compare'}, {'get': 'compare'}]
})
def check_precondition(f):
@ -318,23 +325,36 @@ class CollationView(PGChildNodeView):
JSON of selected collation node
"""
status, res = self._fetch_properties(scid, coid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, coid):
"""
This function fetches the properties of the specified object.
:param scid: Schema ID
:param coid: Collation ID
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']),
scid=scid, coid=coid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
gettext("Could not find the collation object in the database.")
)
return False, gone(gettext("Could not find the collation "
"object in the database."))
return ajax_response(
response=res['rows'][0],
status=200
)
return True, res['rows'][0]
@check_precondition
def get_collation(self, gid, sid, did, scid, coid=None):
@ -748,5 +768,30 @@ class CollationView(PGChildNodeView):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the collations for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'nodes.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
status, data = self._fetch_properties(scid, row['oid'])
if status:
res[row['name']] = data
return res
CollationView.register_node_view(blueprint)
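The collation diff above also shows the refactor applied to most views in this commit: the former properties() body moves into a private _fetch_properties() that returns a (status, result) tuple, so the same code can serve both the HTTP properties endpoint and fetch_objects_to_compare(). A rough standalone sketch of that calling convention follows; the error strings stand in for internal_server_error()/gone() and the row handling is simplified.

# Illustrative only: the (status, result) convention, not pgAdmin internals.
def _fetch_properties(rows):
    # Placeholder for the render_template + execute_dict logic in the diff.
    if rows is None:
        return False, 'internal server error'   # stands in for internal_server_error()
    if not rows:
        return False, 'object not found'        # stands in for gone()
    return True, rows[0]

def properties(rows):
    status, res = _fetch_properties(rows)
    if not status:
        return res                  # res is already an error response
    return {'response': res, 'status': 200}

def fetch_objects_to_compare(all_rows):
    res = {}
    for row in all_rows:
        status, data = _fetch_properties([row])
        if status:                  # silently skip objects that failed to load
            res[row['name']] = data
    return res

print(fetch_objects_to_compare([{'name': 'c1', 'lc_collate': 'en_US.utf8'}]))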

View File

@ -27,6 +27,8 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -79,7 +81,7 @@ class DomainModule(SchemaChildModule):
blueprint = DomainModule(__name__)
class DomainView(PGChildNodeView, DataTypeReader):
class DomainView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
"""
class DomainView
@ -138,6 +140,10 @@ class DomainView(PGChildNodeView, DataTypeReader):
* types(gid, sid, did, scid, fnid=None):
- Returns Data Types.
* compare(**kwargs):
- This function will compare the domain nodes from two different
schemas.
"""
node_type = blueprint.node_type
@ -169,7 +175,8 @@ class DomainView(PGChildNodeView, DataTypeReader):
'get_collations': [
{'get': 'get_collations'},
{'get': 'get_collations'}
]
],
'compare': [{'get': 'compare'}, {'get': 'compare'}]
})
def validate_request(f):
@ -369,15 +376,31 @@ class DomainView(PGChildNodeView, DataTypeReader):
scid: Schema Id
doid: Domain Id
"""
status, res = self._fetch_properties(did, scid, doid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, did, scid, doid):
"""
This function is used to fetch the properties of the specified object.
:param did:
:param scid:
:param doid:
:return:
"""
SQL = render_template("/".join([self.template_path, 'properties.sql']),
scid=scid, doid=doid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext("""
return False, gone(gettext("""
Could not find the domain in the database.
It may have been removed by another user or moved to another schema.
"""))
@ -393,7 +416,7 @@ It may have been removed by another user or moved to another schema.
doid=doid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
data['constraints'] = res['rows']
@ -406,10 +429,7 @@ It may have been removed by another user or moved to another schema.
if doid <= self.manager.db_info[did]['datlastsysoid']:
data['sysdomain'] = True
return ajax_response(
response=data,
status=200
)
return True, data
def _parse_type(self, basetype):
"""
@ -664,7 +684,7 @@ AND relkind != 'c'))"""
)
@check_precondition
def sql(self, gid, sid, did, scid, doid=None):
def sql(self, gid, sid, did, scid, doid=None, return_ajax_response=True):
"""
Returns the SQL for the Domain object.
@ -674,6 +694,7 @@ AND relkind != 'c'))"""
did: Database Id
scid: Schema Id
doid: Domain Id
return_ajax_response:
"""
SQL = render_template("/".join([self.template_path,
@ -716,6 +737,9 @@ AND relkind != 'c'))"""
""".format(self.qtIdent(self.conn, data['basensp'], data['name']))
SQL = sql_header + SQL
if not return_ajax_response:
return SQL.strip('\n')
return ajax_response(response=SQL.strip('\n'))
@check_precondition
@ -846,5 +870,40 @@ AND relkind != 'c'))"""
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the domains for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'node.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
status, data = self._fetch_properties(did, scid, row['oid'])
if status:
if 'constraints' in data and len(data['constraints']) > 0:
for item in data['constraints']:
# Remove keys that should not be part
# of the comparison.
if 'conoid' in item:
item.pop('conoid')
if 'nspname' in item:
item.pop('nspname')
res[row['name']] = data
return res
DomainView.register_node_view(blueprint)
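As the domain hunk above shows, volatile keys such as conoid and nspname are popped from each constraint before the object is handed to the comparison, so two otherwise identical domains in different databases do not register as different. A tiny sketch of that clean-up on an assumed constraints list:

# Hypothetical constraint rows; only the key-stripping step is illustrated.
constraints = [{'conname': 'chk_positive', 'consrc': 'VALUE > 0',
                'conoid': 16501, 'nspname': 'public'}]
cleaned = [{k: v for k, v in c.items() if k not in ('conoid', 'nspname')}
           for c in constraints]
print(cleaned)  # [{'conname': 'chk_positive', 'consrc': 'VALUE > 0'}]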

View File

@ -8,7 +8,7 @@ JOIN
JOIN
pg_namespace nl ON nl.oid=typnamespace
LEFT OUTER JOIN
pg_description des ON (des.objoid=t.oid AND des.classoid='pg_constraint'::regclass)
pg_description des ON (des.objoid=c.oid AND des.classoid='pg_constraint'::regclass)
WHERE
contype = 'c' AND contypid = {{doid}}::oid
ORDER BY

View File

@ -8,7 +8,7 @@ JOIN
JOIN
pg_namespace nl ON nl.oid=typnamespace
LEFT OUTER JOIN
pg_description des ON (des.objoid=t.oid AND des.classoid='pg_constraint'::regclass)
pg_description des ON (des.objoid=c.oid AND des.classoid='pg_constraint'::regclass)
WHERE
contype = 'c'
AND contypid = {{doid}}::oid

View File

@ -32,6 +32,8 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -93,7 +95,8 @@ class ForeignTableModule(SchemaChildModule):
blueprint = ForeignTableModule(__name__)
class ForeignTableView(PGChildNodeView, DataTypeReader):
class ForeignTableView(PGChildNodeView, DataTypeReader,
SchemaDiffObjectCompare):
"""
class ForeignTableView(PGChildNodeView)
@ -174,6 +177,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
* delete_sql(gid, sid, did, scid, foid):
- Returns sql for Script
* compare(**kwargs):
- This function will compare the foreign table nodes from two different
schemas.
"""
node_type = blueprint.node_type
@ -213,7 +219,8 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
'select_sql': [{'get': 'select_sql'}],
'insert_sql': [{'get': 'insert_sql'}],
'update_sql': [{'get': 'update_sql'}],
'delete_sql': [{'get': 'delete_sql'}]
'delete_sql': [{'get': 'delete_sql'}],
'compare': [{'get': 'compare'}, {'get': 'compare'}]
})
def validate_request(f):
@ -447,11 +454,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
scid: Schema Id
foid: Foreign Table Id
"""
data = self._fetch_properties(gid, sid, did, scid, foid)
if data is False:
return gone(
gettext("Could not find the foreign table on the server.")
)
status, data = self._fetch_properties(gid, sid, did, scid, foid)
if not status:
return data
return ajax_response(
response=data,
@ -814,11 +819,10 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
scid: Schema Id
foid: Foreign Table Id
"""
data = self._fetch_properties(gid, sid, did, scid, foid, inherits=True)
if data is False:
return gone(
gettext("Could not find the foreign table on the server.")
)
status, data = self._fetch_properties(gid, sid, did, scid, foid,
inherits=True)
if not status:
return data
col_data = []
for c in data['columns']:
@ -891,12 +895,10 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
foid: Foreign Table Id
"""
if foid is not None:
old_data = self._fetch_properties(gid, sid, did, scid, foid,
inherits=True)
if old_data is False:
return gone(
gettext("Could not find the foreign table on the server.")
)
status, old_data = self._fetch_properties(gid, sid, did, scid,
foid, inherits=True)
if not status:
return old_data
# Prepare dict of columns with key = column's attnum
# Will use this in the update template when any column is
@ -1051,10 +1053,10 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
scid=scid, foid=foid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return False
return False, False
data = res['rows'][0]
@ -1064,7 +1066,7 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
foid=foid)
status, aclres = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=aclres)
return False, internal_server_error(errormsg=aclres)
# Get Formatted Privileges
data.update(self._format_proacl_from_db(aclres['rows']))
@ -1082,7 +1084,7 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
'get_constraints.sql']), foid=foid)
status, cons = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=cons)
return False, internal_server_error(errormsg=cons)
if cons and 'rows' in cons:
data['constraints'] = cons['rows']
@ -1091,7 +1093,7 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
'get_columns.sql']), foid=foid)
status, cols = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=cols)
return False, internal_server_error(errormsg=cols)
# The Length and the precision of the Datatype should be separated.
# The Format we getting from database is: numeric(1,1)
@ -1128,12 +1130,12 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if 'inherits' in res['rows'][0]:
data['inherits'] = res['rows'][0]['inherits']
return data
return True, data
@staticmethod
def convert_precision_to_int(typlen):
@ -1222,11 +1224,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
Returns:
SELECT Script sql for the object
"""
data = self._fetch_properties(gid, sid, did, scid, foid)
if data is False:
return gone(
gettext("Could not find the foreign table on the server.")
)
status, data = self._fetch_properties(gid, sid, did, scid, foid)
if not status:
return data
columns = []
for c in data['columns']:
@ -1259,11 +1259,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
Returns:
INSERT Script sql for the object
"""
data = self._fetch_properties(gid, sid, did, scid, foid)
if data is False:
return gone(
gettext("Could not find the foreign table on the server.")
)
status, data = self._fetch_properties(gid, sid, did, scid, foid)
if not status:
return data
columns = []
values = []
@ -1301,11 +1299,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
Returns:
UPDATE Script sql for the object
"""
data = self._fetch_properties(gid, sid, did, scid, foid)
if data is False:
return gone(
gettext("Could not find the foreign table on the server.")
)
status, data = self._fetch_properties(gid, sid, did, scid, foid)
if not status:
return data
columns = []
@ -1346,11 +1342,9 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
Returns:
DELETE Script sql for the object
"""
data = self._fetch_properties(gid, sid, did, scid, foid)
if data is False:
return gone(
gettext("Could not find the foreign table on the server.")
)
status, data = self._fetch_properties(gid, sid, did, scid, foid)
if not status:
return data
sql = u"DELETE FROM {0}\n\tWHERE <condition>;".format(
self.qtIdent(self.conn, data['basensp'], data['name'])
@ -1358,5 +1352,37 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
return ajax_response(response=sql)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the foreign tables for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'node.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
status, data = self._fetch_properties(0, sid, did, scid,
row['oid'])
if status:
if 'constraints' in data and data['constraints'] is not None \
and len(data['constraints']) > 0:
for item in data['constraints']:
if 'conoid' in item:
item.pop('conoid')
res[row['name']] = data
return res
ForeignTableView.register_node_view(blueprint)

View File

@ -24,6 +24,8 @@ from pgadmin.utils import IS_PY2
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -93,7 +95,7 @@ class FtsConfigurationModule(SchemaChildModule):
blueprint = FtsConfigurationModule(__name__)
class FtsConfigurationView(PGChildNodeView):
class FtsConfigurationView(PGChildNodeView, SchemaDiffObjectCompare):
"""
class FtsConfigurationView(PGChildNodeView)
@ -167,6 +169,9 @@ class FtsConfigurationView(PGChildNodeView):
* dependencies(self, gid, sid, did, scid, cfgid):
- This function get the dependencies and return ajax response for node.
* compare(**kwargs):
- This function will compare the fts configuration nodes from two
different schemas.
"""
node_type = blueprint.node_type
@ -202,6 +207,7 @@ class FtsConfigurationView(PGChildNodeView):
{'get': 'copyConfig'}],
'tokens': [{'get': 'tokens'}, {'get': 'tokens'}],
'dictionaries': [{}, {'get': 'dictionaries'}],
'compare': [{'get': 'compare'}, {'get': 'compare'}]
})
def __init__(self, **kwargs):
@ -343,7 +349,22 @@ class FtsConfigurationView(PGChildNodeView):
scid: Schema Id
cfgid: fts Configuration id
"""
status, res = self._fetch_properties(scid, cfgid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, cfgid):
"""
This function is used to fetch the properties of the specified object.
:param scid:
:param cfgid:
:return:
"""
sql = render_template(
"/".join([self.template_path, 'properties.sql']),
scid=scid,
@ -352,10 +373,10 @@ class FtsConfigurationView(PGChildNodeView):
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
return False, gone(
_(
"Could not find the FTS Configuration node in the "
"database node.")
@ -370,14 +391,11 @@ class FtsConfigurationView(PGChildNodeView):
status, rset = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=rset)
return False, internal_server_error(errormsg=rset)
res['rows'][0]['tokens'] = rset['rows']
return ajax_response(
response=res['rows'][0],
status=200
)
return True, res['rows'][0]
@check_precondition
def create(self, gid, sid, did, scid):
@ -927,5 +945,30 @@ class FtsConfigurationView(PGChildNodeView):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the fts configurations for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'nodes.sql']), scid=scid)
status, fts_cfg = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in fts_cfg['rows']:
status, data = self._fetch_properties(scid, row['oid'])
if status:
res[row['name']] = data
return res
FtsConfigurationView.register_node_view(blueprint)

View File

@ -24,6 +24,8 @@ from pgadmin.utils import IS_PY2
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -91,7 +93,7 @@ class FtsDictionaryModule(SchemaChildModule):
blueprint = FtsDictionaryModule(__name__)
class FtsDictionaryView(PGChildNodeView):
class FtsDictionaryView(PGChildNodeView, SchemaDiffObjectCompare):
"""
class FtsDictionaryView(PGChildNodeView)
@ -159,6 +161,9 @@ class FtsDictionaryView(PGChildNodeView):
* dependencies(self, gid, sid, did, scid, dcid):
- This function get the dependencies and return ajax response for node.
* compare(**kwargs):
- This function will compare the fts dictionary nodes from two
different schemas.
"""
node_type = blueprint.node_type
@ -189,7 +194,7 @@ class FtsDictionaryView(PGChildNodeView):
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'fetch_templates': [{'get': 'fetch_templates'},
{'get': 'fetch_templates'}],
{'get': 'fetch_templates'}]
})
def __init__(self, **kwargs):
@ -353,7 +358,23 @@ class FtsDictionaryView(PGChildNodeView):
scid: Schema Id
dcid: fts dictionary id
"""
status, res = self._fetch_properties(scid, dcid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, dcid):
"""
This function is used to fetch the properties of the specified object.
:param scid:
:param dcid:
:return:
"""
sql = render_template(
"/".join([self.template_path, 'properties.sql']),
scid=scid,
@ -362,10 +383,10 @@ class FtsDictionaryView(PGChildNodeView):
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(_(
return False, gone(_(
"Could not find the FTS Dictionary node in the database node."
))
@ -382,10 +403,7 @@ class FtsDictionaryView(PGChildNodeView):
res['rows'][0]['options']
)
return ajax_response(
response=res['rows'][0],
status=200
)
return True, res['rows'][0]
@check_precondition
def create(self, gid, sid, did, scid):
@ -854,5 +872,30 @@ class FtsDictionaryView(PGChildNodeView):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the fts dictionaries for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'nodes.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
status, data = self._fetch_properties(scid, row['oid'])
if status:
res[row['name']] = data
return res
FtsDictionaryView.register_node_view(blueprint)

View File

@ -24,6 +24,8 @@ from pgadmin.utils import IS_PY2
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -84,7 +86,7 @@ class FtsParserModule(SchemaChildModule):
blueprint = FtsParserModule(__name__)
class FtsParserView(PGChildNodeView):
class FtsParserView(PGChildNodeView, SchemaDiffObjectCompare):
"""
class FtsParserView(PGChildNodeView)
@ -161,6 +163,9 @@ class FtsParserView(PGChildNodeView):
- This function get the dependencies and return ajax response for
FTS Parser node.
* compare(**kwargs):
- This function will compare the fts parser nodes from two
different schemas.
"""
node_type = blueprint.node_type
@ -198,7 +203,7 @@ class FtsParserView(PGChildNodeView):
'lextype_functions': [{'get': 'lextype_functions'},
{'get': 'lextype_functions'}],
'headline_functions': [{'get': 'headline_functions'},
{'get': 'headline_functions'}],
{'get': 'headline_functions'}]
})
def __init__(self, **kwargs):
@ -303,6 +308,32 @@ class FtsParserView(PGChildNodeView):
@check_precondition
def properties(self, gid, sid, did, scid, pid):
"""
:param gid:
:param sid:
:param did:
:param scid:
:param pid:
:return:
"""
status, res = self._fetch_properties(scid, pid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, pid):
"""
This function is used to fetch the properties of the specified object.
:param scid:
:param pid:
:return:
"""
sql = render_template(
"/".join([self.template_path, 'properties.sql']),
scid=scid,
@ -311,16 +342,13 @@ class FtsParserView(PGChildNodeView):
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
return False, gone(
_("Could not find the FTS Parser node in the database node."))
return ajax_response(
response=res['rows'][0],
status=200
)
return True, res['rows'][0]
@check_precondition
def create(self, gid, sid, did, scid):
@ -862,5 +890,30 @@ class FtsParserView(PGChildNodeView):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the fts parsers for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'nodes.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
status, data = self._fetch_properties(scid, row['oid'])
if status:
res[row['name']] = data
return res
FtsParserView.register_node_view(blueprint)

View File

@ -24,6 +24,8 @@ from pgadmin.utils import IS_PY2
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -90,7 +92,7 @@ class FtsTemplateModule(SchemaChildModule):
blueprint = FtsTemplateModule(__name__)
class FtsTemplateView(PGChildNodeView):
class FtsTemplateView(PGChildNodeView, SchemaDiffObjectCompare):
"""
class FtsTemplateView(PGChildNodeView)
@ -154,6 +156,9 @@ class FtsTemplateView(PGChildNodeView):
- This function get the dependencies and return ajax response for the
FTS Template node.
* compare(**kwargs):
- This function will compare the fts template nodes from two
different schemas.
"""
node_type = blueprint.node_type
@ -184,7 +189,7 @@ class FtsTemplateView(PGChildNodeView):
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'get_lexize': [{'get': 'get_lexize'}, {'get': 'get_lexize'}],
'get_init': [{'get': 'get_init'}, {'get': 'get_init'}],
'get_init': [{'get': 'get_init'}, {'get': 'get_init'}]
})
def __init__(self, **kwargs):
@ -281,25 +286,47 @@ class FtsTemplateView(PGChildNodeView):
@check_precondition
def properties(self, gid, sid, did, scid, tid):
"""
:param gid:
:param sid:
:param did:
:param scid:
:param tid:
:return:
"""
status, res = self._fetch_properties(scid, tid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, tid):
"""
This function is used to fetch the properties of the specified object.
:param scid:
:param tid:
:return:
"""
sql = render_template(
"/".join([self.template_path, 'properties.sql']),
scid=scid,
tid=tid
)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
return False, gone(
gettext("Could not find the requested FTS template.")
)
return ajax_response(
response=res['rows'][0],
status=200
)
return True, res['rows'][0]
@check_precondition
def create(self, gid, sid, did, scid):
@ -734,5 +761,30 @@ class FtsTemplateView(PGChildNodeView):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the fts templates for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'nodes.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
status, data = self._fetch_properties(scid, row['oid'])
if status:
res[row['name']] = data
return res
FtsTemplateView.register_node_view(blueprint)

View File

@ -21,7 +21,7 @@ from flask import render_template, make_response, request, jsonify, \
current_app
from flask_babelex import gettext
from pgadmin.browser.server_groups.servers.databases.schemas.utils import \
SchemaChildModule, DataTypeReader
SchemaChildModule, DataTypeReader, get_schema
from pgadmin.browser.server_groups.servers.databases.utils import \
parse_sec_labels_from_db, parse_variables_from_db
from pgadmin.browser.server_groups.servers.utils import parse_priv_from_db, \
@ -30,8 +30,10 @@ from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
class FunctionModule(SchemaChildModule):
@ -115,7 +117,7 @@ class FunctionModule(SchemaChildModule):
blueprint = FunctionModule(__name__)
class FunctionView(PGChildNodeView, DataTypeReader):
class FunctionView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
"""
class FunctionView(PGChildNodeView)
@ -177,6 +179,10 @@ class FunctionView(PGChildNodeView, DataTypeReader):
* exec_sql(gid, sid, did, scid, fnid):
- Returns sql for Script
* compare(**kwargs):
- This function will compare the function nodes from two
different schemas.
"""
node_type = blueprint.node_type
@ -213,6 +219,9 @@ class FunctionView(PGChildNodeView, DataTypeReader):
{'get': 'get_support_functions'}]
})
keys_to_ignore = ['oid', 'proowner', 'typnsp', 'xmin', 'prokind',
'proisagg', 'pronamespace', 'proargdefaults']
@property
def required_args(self):
"""
@ -790,7 +799,7 @@ class FunctionView(PGChildNodeView, DataTypeReader):
)
@check_precondition
def delete(self, gid, sid, did, scid, fnid=None):
def delete(self, gid, sid, did, scid, fnid=None, only_sql=False):
"""
Drop the Function.
@ -841,6 +850,8 @@ class FunctionView(PGChildNodeView, DataTypeReader):
func_args=res['rows'][0]['func_args'],
nspname=res['rows'][0]['nspname'],
cascade=cascade)
if only_sql:
return SQL
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -915,7 +926,8 @@ class FunctionView(PGChildNodeView, DataTypeReader):
)
@check_precondition
def sql(self, gid, sid, did, scid, fnid=None):
def sql(self, gid, sid, did, scid, fnid=None, diff_schema=None,
json_resp=True):
"""
Returns the SQL for the Function object.
@ -989,6 +1001,8 @@ class FunctionView(PGChildNodeView, DataTypeReader):
if not status:
return internal_server_error(errormsg=res)
if diff_schema:
res['rows'][0]['nspname'] = diff_schema
name_with_default_args = self.qtIdent(
self.conn,
res['rows'][0]['nspname'],
@ -1040,6 +1054,10 @@ class FunctionView(PGChildNodeView, DataTypeReader):
if not status:
return internal_server_error(errormsg=res)
if diff_schema:
res['rows'][0]['nspname'] = diff_schema
resp_data['pronamespace'] = diff_schema
name_with_default_args = self.qtIdent(
self.conn,
res['rows'][0]['nspname'],
@ -1071,6 +1089,9 @@ class FunctionView(PGChildNodeView, DataTypeReader):
resp_data['proname']),
resp_data['proargtypenames'].lstrip('(').rstrip(')'))
if not json_resp:
return re.sub('\n{2,}', '\n\n', func_def)
SQL = sql_header + func_def
SQL = re.sub('\n{2,}', '\n\n', SQL)
@ -1597,7 +1618,66 @@ class FunctionView(PGChildNodeView, DataTypeReader):
status=200
)
def get_sql_from_diff(self, gid, sid, did, scid, oid, data=None,
diff_schema=None, drop_sql=False):
sql = ''
if data:
if diff_schema:
data['schema'] = diff_schema
status, sql = self._get_sql(gid, sid, did, scid, data, oid)
else:
if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did,
scid=scid, fnid=oid, only_sql=True)
elif diff_schema:
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, fnid=oid,
diff_schema=diff_schema, json_resp=False)
else:
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, fnid=oid,
json_resp=False)
return sql
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, oid=None):
"""
This function will fetch the list of all the functions for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
server_type = self.manager.server_type
server_version = self.manager.sversion
if server_type == 'pg' and self.blueprint.min_ver is not None and \
server_version < self.blueprint.min_ver:
return res
if server_type == 'ppas' and self.blueprint.min_ppasver is not None \
and server_version < self.blueprint.min_ppasver:
return res
if not oid:
SQL = render_template("/".join([self.sql_template_path,
'node.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
data = self._fetch_properties(0, sid, did, scid, row['oid'])
if isinstance(data, dict):
res[row['name']] = data
else:
data = self._fetch_properties(0, sid, did, scid, oid)
res = data
return res
SchemaDiffRegistry(blueprint.node_type, FunctionView)
FunctionView.register_node_view(blueprint)
@ -1698,6 +1778,7 @@ class ProcedureView(FunctionView):
'prosrc']
SchemaDiffRegistry(procedure_blueprint.node_type, ProcedureView)
ProcedureView.register_node_view(procedure_blueprint)
@ -1796,4 +1877,5 @@ class TriggerFunctionView(FunctionView):
'prosrc']
SchemaDiffRegistry(trigger_function_blueprint.node_type, TriggerFunctionView)
TriggerFunctionView.register_node_view(trigger_function_blueprint)
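get_sql_from_diff() added above is the entry point the Schema Diff tool uses to build the difference script for functions, procedures and trigger functions: a data dict means the object differs and _get_sql() produces the ALTER statements, drop_sql=True means the object exists only in the target and must be dropped, and diff_schema means it exists only in the source and its CREATE script is regenerated against the target schema. A hedged sketch of that dispatch with placeholder SQL strings, not the statements pgAdmin actually generates:

def get_sql_from_diff(oid, data=None, diff_schema=None, drop_sql=False):
    # Placeholder bodies; the real view delegates to _get_sql(), sql() and delete().
    if data:
        if diff_schema:
            data['schema'] = diff_schema
        return '-- ALTER statements built from the changed properties in data'
    if drop_sql:
        return 'DROP FUNCTION some_schema.some_function();  -- target-only object'
    if diff_schema:
        return ('-- CREATE script regenerated with nspname replaced by %r'
                % diff_schema)
    return '-- plain reverse-engineered CREATE script'

print(get_sql_from_diff(oid=1234, drop_sql=True))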

View File

@ -27,6 +27,8 @@ from pgadmin.utils.ajax import make_json_response, \
make_response as ajax_response, internal_server_error, \
precondition_required, gone
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -83,7 +85,7 @@ class PackageModule(SchemaChildModule):
blueprint = PackageModule(__name__)
class PackageView(PGChildNodeView):
class PackageView(PGChildNodeView, SchemaDiffObjectCompare):
node_type = blueprint.node_type
parent_ids = [
@ -111,6 +113,8 @@ class PackageView(PGChildNodeView):
'dependent': [{'get': 'dependents'}]
})
keys_to_ignore = ['oid', 'schema', 'xmin']
def check_precondition(action=None):
"""
This function will behave as a decorator which will checks
@ -297,16 +301,32 @@ class PackageView(PGChildNodeView):
Returns:
"""
status, res = self._fetch_properties(scid, pkgid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, pkgid):
"""
This function is used to fetch the properties of the specified object.
:param scid:
:param pkgid:
:return:
"""
SQL = render_template("/".join([self.template_path, 'properties.sql']),
scid=scid, pkgid=pkgid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
return False, gone(
errormsg=_("Could not find the package in the database.")
)
@ -321,16 +341,13 @@ class PackageView(PGChildNodeView):
status, rset1 = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=rset1)
return False, internal_server_error(errormsg=rset1)
for row in rset1['rows']:
priv = parse_priv_from_db(row)
res['rows'][0].setdefault(row['deftype'], []).append(priv)
return ajax_response(
response=res['rows'][0],
status=200
)
return True, res['rows'][0]
@check_precondition(action="create")
def create(self, gid, sid, did, scid):
@ -396,7 +413,7 @@ class PackageView(PGChildNodeView):
)
@check_precondition(action='delete')
def delete(self, gid, sid, did, scid, pkgid=None):
def delete(self, gid, sid, did, scid, pkgid=None, only_sql=False):
"""
This function will drop the object
@ -453,6 +470,9 @@ class PackageView(PGChildNodeView):
data=res['rows'][0],
cascade=cascade)
if only_sql:
return SQL
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -552,7 +572,8 @@ class PackageView(PGChildNodeView):
status=200
)
def getSQL(self, gid, sid, did, data, scid, pkgid=None, sqltab=False):
def getSQL(self, gid, sid, did, data, scid, pkgid=None, sqltab=False,
diff_schema=None):
"""
This function will generate sql from model data.
@ -621,6 +642,9 @@ class PackageView(PGChildNodeView):
if arg not in data:
data[arg] = old_data[arg]
if diff_schema:
data['schema'] = diff_schema
SQL = render_template("/".join([self.template_path, 'update.sql']),
data=data, o_data=old_data, conn=self.conn)
return SQL, data['name'] if 'name' in data else old_data['name']
@ -635,7 +659,8 @@ class PackageView(PGChildNodeView):
return SQL, data['name']
@check_precondition(action="sql")
def sql(self, gid, sid, did, scid, pkgid):
def sql(self, gid, sid, did, scid, pkgid, diff_schema=None,
json_resp=True):
"""
This function will generate sql for sql panel
@ -645,6 +670,8 @@ class PackageView(PGChildNodeView):
did: Database ID
scid: Schema ID
pkgid: Package ID
diff_schema: Schema diff target schema name
json_resp: json response or plain text response
"""
try:
SQL = render_template(
@ -676,13 +703,18 @@ class PackageView(PGChildNodeView):
res['rows'][0].setdefault(row['deftype'], []).append(priv)
result = res['rows'][0]
sql, name = self.getSQL(gid, sid, did, result, scid, pkgid, True)
sql, name = self.getSQL(gid, sid, did, result, scid, pkgid, True,
diff_schema)
# Most probably this is due to error
if not isinstance(sql, (str, unicode)):
return sql
sql = sql.strip('\n').strip(' ')
# Return sql for schema diff
if not json_resp:
return sql
sql_header = u"-- Package: {}\n\n-- ".format(
self.qtIdent(self.conn, self.schema, result['name'])
)
@ -756,5 +788,54 @@ class PackageView(PGChildNodeView):
return sql[start:end].strip("\n")
@check_precondition(action="fetch_objects_to_compare")
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the packages for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
if self.manager.server_type != 'ppas':
return res
SQL = render_template("/".join([self.template_path,
'nodes.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
status, data = self._fetch_properties(scid, row['oid'])
if status:
res[row['name']] = data
return res
def get_sql_from_diff(self, gid, sid, did, scid, oid, data=None,
diff_schema=None, drop_sql=False):
sql = ''
if data:
if diff_schema:
data['schema'] = diff_schema
status, sql = self.getSQL(gid, sid, did, data, scid, oid)
else:
if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did,
scid=scid, pkgid=oid, only_sql=True)
elif diff_schema:
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, pkgid=oid,
diff_schema=diff_schema, json_resp=False)
else:
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, pkgid=oid,
json_resp=False)
return sql
SchemaDiffRegistry(blueprint.node_type, PackageView)
PackageView.register_node_view(blueprint)
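The package diff above also illustrates the only_sql/json_resp flags this commit threads through delete() and sql(): when the Schema Diff tool needs a script it asks for the plain SQL text instead of executing the statement or wrapping it in an AJAX response. A simplified sketch of that behaviour with hypothetical package names and placeholder SQL:

def delete(pkgid, only_sql=False):
    sql = 'DROP PACKAGE some_schema.some_package;'   # placeholder DROP statement
    if only_sql:
        return sql          # schema diff only wants the text
    # ...the real view executes the statement and returns an AJAX response
    return {'success': 1}

def sql(pkgid, diff_schema=None, json_resp=True):
    script = 'CREATE OR REPLACE PACKAGE {0}.pkg ...'.format(diff_schema or 'some_schema')
    if not json_resp:
        return script       # plain text for the schema diff tool
    return {'response': script, 'status': 200}

print(delete(1, only_sql=True))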

View File

@ -161,6 +161,8 @@ class EdbFuncView(PGChildNodeView, DataTypeReader):
* dependencies(gid, sid, did, scid, pkgid, edbfnid):
- Returns the dependencies for the Functions object.
* compare(**kwargs):
- This function will compare the nodes from two different schemas.
"""
node_type = blueprint.node_type
@ -184,7 +186,8 @@ class EdbFuncView(PGChildNodeView, DataTypeReader):
'nodes': [{'get': 'nodes'}, {'get': 'nodes'}],
'sql': [{'get': 'sql'}],
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}]
'dependent': [{'get': 'dependents'}],
'compare': [{'get': 'compare'}, {'get': 'compare'}]
})
def check_precondition(f):

View File

@ -137,6 +137,8 @@ class EdbVarView(PGChildNodeView, DataTypeReader):
* sql(gid, sid, did, scid, pkgid, varid):
- Returns the SQL for the Functions object.
* compare(**kwargs):
- This function will compare the nodes from two different schemas.
"""
node_type = blueprint.node_type
@ -158,7 +160,8 @@ class EdbVarView(PGChildNodeView, DataTypeReader):
{'get': 'list'}
],
'nodes': [{'get': 'nodes'}, {'get': 'nodes'}],
'sql': [{'get': 'sql'}]
'sql': [{'get': 'sql'}],
'compare': [{'get': 'compare'}, {'get': 'compare'}]
})
def check_precondition(f):

View File

@ -24,6 +24,9 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils import IS_PY2
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
unicode = str
@ -88,7 +91,7 @@ class SequenceModule(SchemaChildModule):
blueprint = SequenceModule(__name__)
class SequenceView(PGChildNodeView):
class SequenceView(PGChildNodeView, SchemaDiffObjectCompare):
node_type = blueprint.node_type
parent_ids = [
@ -273,6 +276,23 @@ class SequenceView(PGChildNodeView):
Returns:
"""
status, res = self._fetch_properties(scid, seid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, seid):
"""
This function is used to fetch the properties of the specified object.
:param scid:
:param seid:
:return:
"""
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
scid=scid, seid=seid
@ -280,10 +300,11 @@ class SequenceView(PGChildNodeView):
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(_("Could not find the sequence in the database."))
return False, gone(
_("Could not find the sequence in the database."))
for row in res['rows']:
SQL = render_template(
@ -292,7 +313,7 @@ class SequenceView(PGChildNodeView):
)
status, rset1 = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=rset1)
return False, internal_server_error(errormsg=rset1)
row['current_value'] = rset1['rows'][0]['last_value']
row['minimum'] = rset1['rows'][0]['min_value']
@ -319,7 +340,7 @@ class SequenceView(PGChildNodeView):
)
status, dataclres = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
for row in dataclres['rows']:
priv = parse_priv_from_db(row)
@ -328,10 +349,7 @@ class SequenceView(PGChildNodeView):
else:
res['rows'][0][row['deftype']] = [priv]
return ajax_response(
response=res['rows'][0],
status=200
)
return True, res['rows'][0]
@check_precondition(action="create")
def create(self, gid, sid, did, scid):
@ -869,5 +887,30 @@ class SequenceView(PGChildNodeView):
status=200
)
@check_precondition(action="fetch_objects_to_compare")
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the sequences for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'nodes.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
status, data = self._fetch_properties(scid, row['oid'])
if status:
res[row['name']] = data
return res
SequenceView.register_node_view(blueprint)

View File

@ -24,6 +24,8 @@ from pgadmin.utils.ajax import precondition_required
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils import IS_PY2
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -91,7 +93,7 @@ class SynonymModule(SchemaChildModule):
blueprint = SynonymModule(__name__)
class SynonymView(PGChildNodeView):
class SynonymView(PGChildNodeView, SchemaDiffObjectCompare):
"""
This class is responsible for generating routes for Synonym node
@ -143,6 +145,10 @@ class SynonymView(PGChildNodeView):
* dependent(gid, sid, did, scid):
- This function will generate dependent list to show it in dependent
pane for the selected Synonym node.
* compare(**kwargs):
- This function will compare the synonym nodes from two
different schemas.
"""
node_type = blueprint.node_type
@ -385,26 +391,36 @@ class SynonymView(PGChildNodeView):
Returns:
JSON of selected synonym node
"""
status, res = self._fetch_properties(scid, syid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, syid):
"""
This function is used to fetch the properties of the specified object
:param scid:
:param syid:
:return:
"""
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
scid=scid, syid=syid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) > 0:
return ajax_response(
response=res['rows'][0],
status=200
)
else:
return gone(
if len(res['rows']) == 0:
return False, gone(
gettext('The specified synonym could not be found.')
)
return True, res['rows'][0]
except Exception as e:
return internal_server_error(errormsg=str(e))
@ -707,5 +723,33 @@ class SynonymView(PGChildNodeView):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the synonyms for
specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
if self.manager.server_type != 'ppas':
return res
SQL = render_template("/".join([self.template_path,
'properties.sql']), scid=scid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in rset['rows']:
status, data = self._fetch_properties(scid, row['name'])
if status:
res[row['name']] = data
return res
SynonymView.register_node_view(blueprint)

View File

@ -11,9 +11,11 @@
import simplejson as json
import re
import copy
import random
import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, request, jsonify, url_for
from flask import render_template, request, jsonify, url_for, current_app
from flask_babelex import gettext
from pgadmin.browser.server_groups.servers.databases.schemas.utils \
import SchemaChildModule, DataTypeReader, VacuumSettings
@ -22,8 +24,15 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from .utils import BaseTableView
from pgadmin.utils.preferences import Preferences
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.foreign_key import utils as fkey_utils
from .schema_diff_utils import SchemaDiffTableCompare
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
columns import utils as column_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
@ -132,7 +141,8 @@ class TableModule(SchemaChildModule):
blueprint = TableModule(__name__)
class TableView(BaseTableView, DataTypeReader, VacuumSettings):
class TableView(BaseTableView, DataTypeReader, VacuumSettings,
SchemaDiffTableCompare):
"""
This class is responsible for generating routes for Table node
@ -229,6 +239,10 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
* delete_sql(gid, sid, did, scid, foid):
- Returns sql for Script
* compare(**kwargs):
- This function will compare the table nodes from two
different schemas.
"""
node_type = blueprint.node_type
@ -277,7 +291,8 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
'insert_sql': [{'get': 'insert_sql'}],
'update_sql': [{'get': 'update_sql'}],
'delete_sql': [{'get': 'delete_sql'}],
'count_rows': [{'get': 'count_rows'}]
'count_rows': [{'get': 'count_rows'}],
'compare': [{'get': 'compare'}, {'get': 'compare'}]
})
@BaseTableView.check_precondition
@ -464,9 +479,9 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
- setting
values
"""
res = self.get_vacuum_table_settings(self.conn)
res = self.get_vacuum_table_settings(self.conn, sid)
return ajax_response(
response=res['rows'],
response=res,
status=200
)
@ -480,9 +495,9 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
- setting
values
"""
res = self.get_vacuum_toast_settings(self.conn)
res = self.get_vacuum_toast_settings(self.conn, sid)
return ajax_response(
response=res['rows'],
response=res,
status=200
)
@ -582,7 +597,22 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
Returns:
JSON of selected table node
"""
status, res = self._fetch_properties(did, scid, tid)
if not status:
return res
return super(TableView, self).properties(
gid, sid, did, scid, tid, res
)
def _fetch_properties(self, did, scid, tid):
"""
This function is used to fetch the properties of the specified object
:param did:
:param scid:
:param tid:
:return:
"""
SQL = render_template(
"/".join([self.table_template_path, 'properties.sql']),
did=did, scid=scid, tid=tid,
@ -590,10 +620,11 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext("The specified table could not be found."))
return False, gone(
gettext("The specified table could not be found."))
# We will check the threshold set by user before executing
# the query because that can cause performance issues
@ -620,7 +651,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
status, count = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=count)
return False, internal_server_error(errormsg=count)
res['rows'][0]['rows_cnt'] = count
@ -628,9 +659,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
elif not estimated_row_count:
res['rows'][0]['rows_cnt'] = estimated_row_count
return super(TableView, self).properties(
gid, sid, did, scid, tid, res
)
return True, res
@BaseTableView.check_precondition
def types(self, gid, sid, did, scid, tid=None, clid=None):
@ -1168,6 +1197,69 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
"""
return BaseTableView.reset_statistics(self, scid, tid)
@BaseTableView.check_precondition
def get_sql_from_table_diff(self, **kwargs):
"""
This function will create sql on the basis the difference of 2 tables
"""
data = dict()
res = None
sid = kwargs['sid']
did = kwargs['did']
scid = kwargs['scid']
tid = kwargs['tid']
diff_data = kwargs['diff_data'] if 'diff_data' in kwargs else None
json_resp = kwargs['json_resp'] if 'json_resp' in kwargs else True
diff_schema = kwargs['diff_schema'] if 'diff_schema' in kwargs else\
None
schema_diff_table = kwargs['schema_diff_table'] if\
'schema_diff_table' in kwargs else None
if diff_data:
return self._fetch_sql(did, scid, tid, diff_data, json_resp)
else:
main_sql = []
SQL = render_template(
"/".join([self.table_template_path, 'properties.sql']),
did=did, scid=scid, tid=tid,
datlastsysoid=self.datlastsysoid
)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext("The specified table could not be found."
))
if status:
data = res['rows'][0]
if diff_schema:
data['schema'] = diff_schema
if schema_diff_table:
data['orig_name'] = data['name']
data['name'] = 'schema_diff_temp_{0}'.format(
random.randint(1, 9999999))
sql, partition_sql = BaseTableView.get_reverse_engineered_sql(
self, did, scid, tid, main_sql, data, json_resp,
diff_partition_sql=True)
else:
sql, partition_sql = BaseTableView.get_reverse_engineered_sql(
self, did, scid, tid, main_sql, data, json_resp)
if schema_diff_table:
# If partition tables have different partitions
sql += render_template(
"/".join([self.table_template_path, 'schema_diff.sql']),
conn=self.conn, data=data, partition_sql=partition_sql
)
return sql
@BaseTableView.check_precondition
def msql(self, gid, sid, did, scid, tid=None):
"""
@ -1181,7 +1273,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
tid: Table ID
"""
data = dict()
res = None
SQL = ''
for k, v in request.args.items():
try:
# comments should be taken as is because if user enters a
@ -1193,6 +1285,11 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
except (ValueError, TypeError, KeyError):
data[k] = v
return self._fetch_sql(did, scid, tid, data)
def _fetch_sql(self, did, scid, tid, data, json_resp=True):
res = None
if tid is not None:
SQL = render_template(
"/".join([self.table_template_path, 'properties.sql']),
@ -1201,13 +1298,18 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return internal_server_error(errormsg=SQL)
SQL, name = self.get_sql(did, scid, tid, data, res)
SQL = re.sub('\n{2,}', '\n\n', SQL)
SQL = SQL.strip('\n')
if not json_resp:
return SQL
if SQL == '':
SQL = "--modified SQL"
return make_json_response(
data=SQL,
status=200
@ -1419,7 +1521,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
return ajax_response(response=sql)
@BaseTableView.check_precondition
def delete_sql(self, gid, sid, did, scid, tid):
def delete_sql(self, gid, sid, did, scid, tid, json_resp=True):
"""
DELETE script sql for the object
@ -1448,6 +1550,9 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
self.qtIdent(self.conn, data['schema'], data['name'])
)
if not json_resp:
return sql
return ajax_response(response=sql)
@BaseTableView.check_precondition
@ -1502,5 +1607,60 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
data={'total_rows': count}
)
def get_delete_sql(self, res):
self.cmd = 'delete'
sql = super(TableView, self).get_delete_sql(res)
self.cmd = None
return sql
@BaseTableView.check_precondition
def fetch_tables(self, sid, did, scid, tid=None, keys_to_remove=None):
"""
This function will fetch the list of all the tables
and will be used by schema diff.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:param tid: Table Id
:param keys_to_remove: Table columns to be removed from the dataset
:return: Table dataset
"""
if tid:
status, data = self._fetch_properties(did, scid, tid)
if not status:
current_app.logger.error(data)
return False
data = super(TableView, self).properties(
0, sid, did, scid, tid, data, False
)
self.remove_keys_for_comparision(data, keys_to_remove)
return data
else:
res = dict()
SQL = render_template("/".join([self.table_template_path,
'nodes.sql']), scid=scid)
status, tables = self.conn.execute_2darray(SQL)
if not status:
current_app.logger.error(tables)
return False
for row in tables['rows']:
status, data = self._fetch_properties(did, scid, row['oid'])
if status:
data = super(TableView, self).properties(
0, sid, did, scid, row['oid'], data, False
)
self.remove_keys_for_comparision(data, keys_to_remove)
res[row['name']] = data
return res
SchemaDiffRegistry(blueprint.node_type, TableView)
TableView.register_node_view(blueprint)
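For tables, fetch_tables() above reuses the existing properties pipeline but strips keys that should not influence the comparison (OIDs, estimated row counts, and so on) before the datasets reach compare_dictionaries()/directory_diff imported at the top of this file. A simplified sketch of comparing two such name-keyed datasets; the helper names and ignored keys here are illustrative, not the pgAdmin implementation:

def strip_keys(data, keys_to_remove):
    # Drop the keys that vary between servers before comparing.
    return {k: v for k, v in data.items() if k not in keys_to_remove}

def compare_tables(source, target, keys_to_remove=('oid', 'rows_cnt')):
    result = {}
    for name in sorted(set(source) | set(target)):
        if name not in target:
            result[name] = 'source_only'
        elif name not in source:
            result[name] = 'target_only'
        else:
            same = (strip_keys(source[name], keys_to_remove) ==
                    strip_keys(target[name], keys_to_remove))
            result[name] = 'identical' if same else 'different'
    return result

src = {'t1': {'oid': 101, 'name': 't1', 'rows_cnt': 5}}
tgt = {'t1': {'oid': 999, 'name': 't1', 'rows_cnt': 0}, 't2': {'oid': 7, 'name': 't2'}}
print(compare_tables(src, tgt))  # {'t1': 'identical', 't2': 'target_only'}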

View File

@ -29,6 +29,7 @@ from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils import IS_PY2
from pgadmin.utils.ajax import ColParamsJSONDecoder
# If we are in Python3
if not IS_PY2:
unicode = str

View File

@ -26,6 +26,10 @@ from pgadmin.browser.server_groups.servers.databases.schemas.utils \
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils import IS_PY2
from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
unicode = str
@ -155,7 +159,7 @@ class CompoundTriggerModule(CollectionNodeModule):
blueprint = CompoundTriggerModule(__name__)
class CompoundTriggerView(PGChildNodeView):
class CompoundTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
"""
This class is responsible for generating routes for Compound Trigger node
@ -245,6 +249,10 @@ class CompoundTriggerView(PGChildNodeView):
'enable': [{'put': 'enable_disable_trigger'}]
})
# Schema Diff: Keys to ignore while comparing
keys_to_ignore = ['oid', 'xmin', 'nspname', 'tfunction',
'tgrelid', 'tgfoid']
def check_precondition(f):
"""
This function will behave as a decorator which will check
@ -267,6 +275,12 @@ class CompoundTriggerView(PGChildNodeView):
]['datlastsysoid'] if self.manager.db_info is not None and \
kwargs['did'] in self.manager.db_info else 0
self.table_template_path = compile_template_path(
'tables/sql',
self.manager.server_type,
self.manager.version
)
# we will set template path for sql scripts
self.template_path = 'compound_triggers/sql/{0}/#{1}#'.format(
self.manager.server_type, self.manager.version)
@ -417,6 +431,18 @@ class CompoundTriggerView(PGChildNodeView):
JSON of selected compound trigger node
"""
status, data = self._fetch_properties(tid, trid)
if not status:
return data
return ajax_response(
response=data,
status=200
)
def _fetch_properties(self, tid, trid):
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, trid=trid,
@ -440,10 +466,7 @@ class CompoundTriggerView(PGChildNodeView):
data = trigger_definition(data)
return ajax_response(
response=data,
status=200
)
return True, data
@check_precondition
def create(self, gid, sid, did, scid, tid):
@ -519,7 +542,7 @@ class CompoundTriggerView(PGChildNodeView):
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, tid, trid=None):
def delete(self, gid, sid, did, scid, tid, trid=None, only_sql=False):
"""
This function will delete the existing compound trigger object
@ -579,6 +602,9 @@ class CompoundTriggerView(PGChildNodeView):
conn=self.conn,
cascade=cascade
)
if only_sql:
return SQL
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -846,5 +872,109 @@ class CompoundTriggerView(PGChildNodeView):
status=200
)
@check_precondition
def get_sql_from_diff(self, gid, sid, did, scid, tid, oid,
data=None, diff_schema=None, drop_sql=False):
if data:
sql, name = self.get_sql(scid, tid, oid, data)
if not isinstance(sql, (str, unicode)):
return sql
sql = sql.strip('\n').strip(' ')
else:
if drop_sql:
SQL = self.delete(gid=gid, sid=sid, did=did,
scid=scid, tid=tid,
trid=oid, only_sql=True)
else:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, trid=oid,
datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext("""Could not find the compound
trigger in the table."""))
data = dict(res['rows'][0])
# Adding parent into data dict,
# will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
if len(data['tgattr']) >= 1:
columns = ', '.join(data['tgattr'].split(' '))
data['columns'] = self._column_details(tid, columns)
data = self._trigger_definition(data)
if diff_schema:
data['schema'] = diff_schema
SQL, name = self.get_sql(scid, tid, None, data)
sql_header = u"-- Compound Trigger: {0}\n\n-- ".format(
data['name'])
sql_header += render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn)
SQL = sql_header + '\n\n' + SQL.strip('\n')
# If compound trigger is disabled then add sql
# code for the same
if not data['is_enable_trigger']:
SQL += '\n\n'
SQL += render_template("/".join([
self.template_path,
'enable_disable_trigger.sql']),
data=data, conn=self.conn)
return SQL
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
ignore_keys=False):
"""
This function will fetch the list of all the compound triggers for
the specified table id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:param tid: Table Id
:return:
"""
res = dict()
if oid:
status, data = self._fetch_properties(tid, oid)
if not status:
current_app.logger.error(data)
return False
res = data
else:
SQL = render_template("/".join([self.template_path,
'nodes.sql']), tid=tid)
status, triggers = self.conn.execute_2darray(SQL)
if not status:
current_app.logger.error(triggers)
return False
for row in triggers['rows']:
status, data = self._fetch_properties(tid, row['oid'])
if status:
if ignore_keys:
for key in self.keys_to_ignore:
if key in data:
del data[key]
res[row['name']] = data
return res
SchemaDiffRegistry(blueprint.node_type, CompoundTriggerView, 'table')
CompoundTriggerView.register_node_view(blueprint)
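
Editor's note: a hedged sketch of how get_sql_from_diff() above is expected to be called for the three comparison outcomes. The ids dictionary and helper function are placeholders; the view instance and its connection are assumed to have been initialized by check_precondition.

def compound_trigger_diff_sql(view, ids, comp_status, diff_data=None):
    # Present only in the source schema: re-create it under the target schema.
    if comp_status == 'source_only':
        return view.get_sql_from_diff(gid=ids['gid'], sid=ids['sid'],
                                      did=ids['did'], scid=ids['scid'],
                                      tid=ids['tid'], oid=ids['oid'],
                                      diff_schema=ids['target_schema'])
    # Present only in the target schema: generate the DROP script.
    if comp_status == 'target_only':
        return view.get_sql_from_diff(gid=ids['gid'], sid=ids['sid'],
                                      did=ids['did'], scid=ids['scid'],
                                      tid=ids['tid'], oid=ids['oid'],
                                      drop_sql=True)
    # Different on both sides: generate ALTER statements from the diff data.
    return view.get_sql_from_diff(gid=ids['gid'], sid=ids['sid'],
                                  did=ids['did'], scid=ids['scid'],
                                  tid=ids['tid'], oid=ids['oid'],
                                  data=diff_data)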

View File

@ -152,7 +152,8 @@ def get_index_constraint_sql(conn, did, tid, data, template_path=None):
modified_sql, name = get_sql(conn, c, did, tid, ctype,
c['oid'])
sql.append(modified_sql.strip('\n'))
if modified_sql:
sql.append(modified_sql.strip('\n'))
if 'added' in constraint:
for c in constraint['added']:
@ -183,6 +184,7 @@ def get_sql(conn, data, did, tid, ctype, cid=None, template_path=None):
:return:
"""
name = data['name'] if 'name' in data else None
sql = None
if cid is not None:
sql = render_template("/".join([template_path, 'properties.sql']),
did=did, tid=tid, cid=cid,

View File

@ -13,7 +13,7 @@ import simplejson as json
from functools import wraps
import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, request, jsonify
from flask import render_template, request, jsonify, current_app
from flask_babelex import gettext
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
@ -25,8 +25,14 @@ from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils import IS_PY2
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
from pgadmin.browser.server_groups.servers.databases.schemas. \
tables.indexes import utils as index_utils
# If we are in Python3
if not IS_PY2:
unicode = str
@ -135,7 +141,7 @@ class IndexesModule(CollectionNodeModule):
blueprint = IndexesModule(__name__)
class IndexesView(PGChildNodeView):
class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
"""
This class is responsible for generating routes for Index node
@ -227,6 +233,11 @@ class IndexesView(PGChildNodeView):
{'get': 'get_op_class'}]
})
# Schema Diff: Keys to ignore while comparing
keys_to_ignore = ['oid', 'relowner', 'schema',
'indrelid', 'nspname'
]
def check_precondition(f):
"""
This function will behave as a decorator which will check
@ -248,6 +259,12 @@ class IndexesView(PGChildNodeView):
]['datlastsysoid'] if self.manager.db_info is not None and \
kwargs['did'] in self.manager.db_info else 0
self.table_template_path = compile_template_path(
'tables/sql',
self.manager.server_type,
self.manager.version
)
# we will set template path for sql scripts
self.template_path = compile_template_path(
'indexes/sql/',
@ -485,19 +502,35 @@ class IndexesView(PGChildNodeView):
Returns:
JSON of selected schema node
"""
status, data = self._fetch_properties(did, tid, idx)
if not status:
return data
return ajax_response(
response=data,
status=200
)
def _fetch_properties(self, did, tid, idx):
"""
This function is used to fetch the properties of specified object.
:param did:
:param tid:
:param idx:
:return:
"""
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
did=did, tid=tid, idx=idx, datlastsysoid=self.datlastsysoid
)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext("""Could not find the index in the table."""))
return False, gone(
gettext("""Could not find the index in the table."""))
# Making copy of output for future use
data = dict(res['rows'][0])
@ -509,10 +542,7 @@ class IndexesView(PGChildNodeView):
if self.manager.version >= 110000:
data = index_utils.get_include_details(self.conn, idx, data)
return ajax_response(
response=data,
status=200
)
return True, data
@check_precondition
def create(self, gid, sid, did, scid, tid):
@ -620,7 +650,8 @@ class IndexesView(PGChildNodeView):
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, tid, idx=None):
def delete(self, gid, sid, did, scid, tid, idx=None,
only_sql=False):
"""
This function will delete the existing index object
@ -676,6 +707,9 @@ class IndexesView(PGChildNodeView):
"/".join([self.template_path, 'delete.sql']),
data=data, conn=self.conn, cascade=cascade
)
if only_sql:
return SQL
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -792,6 +826,32 @@ class IndexesView(PGChildNodeView):
return ajax_response(response=SQL)
@check_precondition
def get_sql_from_index_diff(self, sid, did, scid, tid, idx, data=None,
diff_schema=None, drop_req=False):
tmp_idx = idx
schema = ''
if data:
schema = self.schema
elif diff_schema:
schema = diff_schema
sql = index_utils.get_reverse_engineered_sql(
self.conn, schema,
self.table, did, tid, idx,
self.datlastsysoid,
template_path=None, with_header=False)
drop_sql = ''
if drop_req:
drop_sql = '\n' + render_template(
"/".join([self.template_path, 'delete.sql']),
data=data, conn=self.conn
)
return drop_sql + '\n\n' + sql
@check_precondition
def dependents(self, gid, sid, did, scid, tid, idx):
"""
@ -914,5 +974,129 @@ class IndexesView(PGChildNodeView):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
ignore_keys=False):
"""
This function will fetch the list of all the indexes for
the specified table id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
if not oid:
SQL = render_template("/".join([self.template_path,
'nodes.sql']), tid=tid)
status, indexes = self.conn.execute_2darray(SQL)
if not status:
current_app.logger.error(indexes)
return False
for row in indexes['rows']:
status, data = self._fetch_properties(did, tid,
row['oid'])
if status:
if ignore_keys:
for key in self.keys_to_ignore:
if key in data:
del data[key]
res[row['name']] = data
else:
status, data = self._fetch_properties(did, tid,
oid)
if not status:
current_app.logger.error(data)
return False
res = data
return res
def ddl_compare(self, **kwargs):
"""
This function will compare index properties and
return the difference of SQL
"""
src_sid = kwargs.get('source_sid')
src_did = kwargs.get('source_did')
src_scid = kwargs.get('source_scid')
src_tid = kwargs.get('source_tid')
src_oid = kwargs.get('source_oid')
tar_sid = kwargs.get('target_sid')
tar_did = kwargs.get('target_did')
tar_scid = kwargs.get('target_scid')
tar_tid = kwargs.get('target_tid')
tar_oid = kwargs.get('target_oid')
comp_status = kwargs.get('comp_status')
source = ''
target = ''
diff = ''
status, target_schema = self.get_schema(tar_sid,
tar_did,
tar_scid
)
if not status:
return internal_server_error(errormsg=target_schema)
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
diff = self.get_sql_from_index_diff(sid=src_sid,
did=src_did, scid=src_scid,
tid=src_tid, idx=src_oid,
diff_schema=target_schema)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
diff = self.delete(gid=1, sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_tid,
idx=tar_oid, only_sql=True)
else:
source = self.fetch_objects_to_compare(sid=src_sid, did=src_did,
scid=src_scid, tid=src_tid,
oid=src_oid)
target = self.fetch_objects_to_compare(sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_tid,
oid=tar_oid)
if not (source or target):
return None
diff_dict = directory_diff(
source, target, ignore_keys=self.keys_to_ignore,
difference={}
)
required_create_keys = ['columns']
create_req = False
for key in required_create_keys:
if key in diff_dict:
create_req = True
if create_req:
diff = self.get_sql_from_index_diff(sid=src_sid,
did=src_did,
scid=src_scid,
tid=src_tid,
idx=src_oid,
diff_schema=target_schema,
drop_req=True)
else:
diff = self.get_sql_from_index_diff(sid=tar_sid,
did=tar_did,
scid=tar_scid,
tid=tar_tid,
idx=tar_oid,
data=diff_dict)
return diff
SchemaDiffRegistry(blueprint.node_type, IndexesView, 'table')
IndexesView.register_node_view(blueprint)
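
Editor's note: for illustration only, the keyword arguments ddl_compare() above expects from the schema diff engine. All id values below are hypothetical placeholders for the source and target server/database/schema/table/index picked by the comparison grid.

from pgadmin.tools.schema_diff.model import SchemaDiffModel

def index_diff_ddl(view):
    # comp_status is one of the SchemaDiffModel.COMPARISON_STATUS values.
    return view.ddl_compare(
        source_sid=1, source_did=12345, source_scid=2200,
        source_tid=16384, source_oid=16390,
        target_sid=2, target_did=54321, target_scid=2201,
        target_tid=16500, target_oid=16510,
        comp_status=SchemaDiffModel.COMPARISON_STATUS['different']
    )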

View File

@ -26,7 +26,7 @@ def get_template_path(f):
def wrap(*args, **kwargs):
# Here args[0] will hold the connection object
conn_obj = args[0]
if 'template_path' not in kwargs:
if 'template_path' not in kwargs or kwargs['template_path'] is None:
kwargs['template_path'] = \
'indexes/sql/#{0}#'.format(conn_obj.manager.version)
@ -229,7 +229,7 @@ def get_sql(conn, data, did, tid, idx, datlastsysoid,
@get_template_path
def get_reverse_engineered_sql(conn, schema, table, did, tid, idx,
datlastsysoid,
template_path=None):
template_path=None, with_header=True):
"""
This function will return reverse engineered sql for specified trigger.
@ -240,6 +240,8 @@ def get_reverse_engineered_sql(conn, schema, table, did, tid, idx,
:param idx: Index ID
:param datlastsysoid:
:param template_path: Optional template path
:param with_header: Optional parameter to decide whether the SQL will be
returned with header or not
:return:
"""
SQL = render_template("/".join([template_path, 'properties.sql']),
@ -267,11 +269,12 @@ def get_reverse_engineered_sql(conn, schema, table, did, tid, idx,
SQL, name = get_sql(conn, data, did, tid, None, datlastsysoid)
sql_header = u"-- Index: {0}\n\n-- ".format(data['name'])
if with_header:
sql_header = u"-- Index: {0}\n\n-- ".format(data['name'])
sql_header += render_template("/".join([template_path, 'delete.sql']),
data=data, conn=conn)
sql_header += render_template("/".join([template_path, 'delete.sql']),
data=data, conn=conn)
SQL = sql_header + '\n\n' + SQL
SQL = sql_header + '\n\n' + SQL
return SQL
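
Editor's note: a short sketch of the new with_header switch. When SQL is assembled for the schema diff, with_header=False leaves out the "-- Index: ..." banner and the preceding DROP statement; the connection object and ids are assumed to come from an initialized view.

from pgadmin.browser.server_groups.servers.databases.schemas.tables.indexes \
    import utils as index_utils

def index_sql_for_diff(conn, schema, table, did, tid, idx, datlastsysoid):
    # Returns only the CREATE INDEX statement, which suits script generation.
    return index_utils.get_reverse_engineered_sql(
        conn, schema, table, did, tid, idx, datlastsysoid,
        template_path=None, with_header=False)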

View File

@ -24,6 +24,11 @@ from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.utils.ajax import make_json_response, precondition_required
from config import PG_DEFAULT_DRIVER
from pgadmin.browser.utils import PGChildModule
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
def backend_supported(module, manager, **kwargs):
@ -152,7 +157,8 @@ class PartitionsModule(CollectionNodeModule):
blueprint = PartitionsModule(__name__)
class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings,
SchemaDiffObjectCompare):
"""
This class is responsible for generating routes for Partition node
@ -200,6 +206,10 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
})
# Schema Diff: Keys to ignore while comparing
keys_to_ignore = ['oid', 'schema', 'vacuum_table',
'vacuum_toast', 'edit_types']
def get_children_nodes(self, manager, **kwargs):
nodes = []
# treat partition table as normal table.
@ -342,6 +352,63 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
return super(PartitionsView, self).properties(
gid, sid, did, scid, ptid, res)
@BaseTableView.check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, ptid=None):
"""
This function will fetch the list of all the partitions for
the specified table.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:param tid: Table Id
:param ptid: Partition table Id
:return:
"""
res = {}
if ptid:
SQL = render_template("/".join([self.partition_template_path,
'properties.sql']),
did=did, scid=scid, tid=tid,
ptid=ptid, datlastsysoid=self.datlastsysoid)
status, result = self.conn.execute_dict(SQL)
if not status:
current_app.logger.error(result)
return False
res = super(PartitionsView, self).properties(
0, sid, did, scid, ptid, result)
else:
SQL = render_template(
"/".join([self.partition_template_path, 'nodes.sql']),
scid=scid, tid=tid
)
status, partitions = self.conn.execute_2darray(SQL)
if not status:
current_app.logger.error(partitions)
return False
for row in partitions['rows']:
SQL = render_template("/".join([self.partition_template_path,
'properties.sql']),
did=did, scid=scid, tid=tid,
ptid=row['oid'],
datlastsysoid=self.datlastsysoid)
status, result = self.conn.execute_dict(SQL)
if not status:
current_app.logger.error(result)
return False
data = super(PartitionsView, self).properties(
0, sid, did, scid, row['oid'], result, False
)
res[row['name']] = data
return res
@BaseTableView.check_precondition
def sql(self, gid, sid, did, scid, tid, ptid):
"""
@ -375,6 +442,62 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
return BaseTableView.get_reverse_engineered_sql(self, did, scid, ptid,
main_sql, data)
@BaseTableView.check_precondition
def get_sql_from_diff(self, **kwargs):
"""
This function will create SQL on the basis of the difference between two tables
"""
data = dict()
res = None
sid = kwargs['sid']
did = kwargs['did']
scid = kwargs['scid']
tid = kwargs['tid']
ptid = kwargs['ptid']
diff_data = kwargs['diff_data'] if 'diff_data' in kwargs else None
json_resp = kwargs['json_resp'] if 'json_resp' in kwargs else True
diff_schema = kwargs['diff_schema'] if 'diff_schema' in kwargs else\
None
if diff_data:
SQL = render_template("/".join([self.partition_template_path,
'properties.sql']),
did=did, scid=scid, tid=tid,
ptid=ptid, datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
SQL, name = self.get_sql(did, scid, ptid, diff_data, res)
SQL = re.sub('\n{2,}', '\n\n', SQL)
SQL = SQL.strip('\n')
return SQL
else:
main_sql = []
SQL = render_template("/".join([self.partition_template_path,
'properties.sql']),
did=did, scid=scid, tid=tid,
ptid=ptid, datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext(
"The specified partitioned table could not be found."))
data = res['rows'][0]
if diff_schema:
data['schema'] = diff_schema
data['parent_schema'] = diff_schema
return BaseTableView.get_reverse_engineered_sql(self, did,
scid, ptid,
main_sql, data,
False)
@BaseTableView.check_precondition
def detach(self, gid, sid, did, scid, tid, ptid):
"""
@ -576,7 +699,7 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
return internal_server_error(errormsg=str(e))
@BaseTableView.check_precondition
def delete(self, gid, sid, did, scid, tid, ptid=None):
def delete(self, gid, sid, did, scid, tid, ptid=None, only_sql=False):
"""
This function will delete the table object
@ -631,5 +754,61 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
except Exception as e:
return internal_server_error(errormsg=str(e))
def ddl_compare(self, **kwargs):
"""
This function will compare the partition table properties and
return the difference of SQL
"""
src_sid = kwargs.get('source_sid')
src_did = kwargs.get('source_did')
src_scid = kwargs.get('source_scid')
src_tid = kwargs.get('source_tid')
src_oid = kwargs.get('source_oid')
tar_sid = kwargs.get('target_sid')
tar_did = kwargs.get('target_did')
tar_scid = kwargs.get('target_scid')
tar_tid = kwargs.get('target_tid')
tar_oid = kwargs.get('target_oid')
comp_status = kwargs.get('comp_status')
source = ''
target = ''
diff = ''
status, target_schema = self.get_schema_for_schema_diff(tar_sid,
tar_did,
tar_scid
)
if not status:
return internal_server_error(errormsg=target_schema)
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
diff = self.get_sql_from_diff(sid=src_sid,
did=src_did, scid=src_scid,
tid=src_tid, ptid=src_oid,
diff_schema=target_schema)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
SQL = render_template("/".join([self.partition_template_path,
'properties.sql']),
did=did, scid=scid, tid=tid,
ptid=ptid, datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
SQL = render_template(
"/".join([self.table_template_path, 'properties.sql']),
did=tar_did, scid=tar_scid, tid=tar_oid,
datlastsysoid=self.datlastsysoid
)
status, res = self.conn.execute_dict(SQL)
if status:
self.cmd = 'delete'
diff = super(PartitionsView, self).get_delete_sql(res)
self.cmd = None
return diff
SchemaDiffRegistry(blueprint.node_type, PartitionsView, 'table')
PartitionsView.register_node_view(blueprint)
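
Editor's note: a hedged sketch of the two ways get_sql_from_diff() above is driven: with diff_data to produce ALTER statements for a changed partition, or with diff_schema to reverse engineer a source-only partition against the target schema. The helper function and the ids dictionary are placeholders.

def partition_diff_sql(view, ids, diff_data=None, target_schema=None):
    if diff_data:
        # Changed partition: render ALTER statements from the computed
        # property differences.
        return view.get_sql_from_diff(sid=ids['sid'], did=ids['did'],
                                      scid=ids['scid'], tid=ids['tid'],
                                      ptid=ids['ptid'], diff_data=diff_data)
    # Source-only partition: re-create it under the target schema.
    return view.get_sql_from_diff(sid=ids['sid'], did=ids['did'],
                                  scid=ids['scid'], tid=ids['tid'],
                                  ptid=ids['ptid'],
                                  diff_schema=target_schema)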

View File

@ -13,7 +13,8 @@ import simplejson as json
from functools import wraps
import pgadmin.browser.server_groups.servers.databases.schemas as schemas
from flask import render_template, make_response, request, jsonify
from flask import render_template, make_response, request, jsonify,\
current_app
from flask_babelex import gettext
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.browser.server_groups.servers.databases.schemas.utils import \
@ -23,7 +24,11 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils import IS_PY2
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
unicode = str
@ -134,7 +139,7 @@ class RuleModule(CollectionNodeModule):
blueprint = RuleModule(__name__)
class RuleView(PGChildNodeView):
class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
"""
This is a class for rule node which inherits the
properties and methods from PGChildNodeView class and define
@ -178,6 +183,9 @@ class RuleView(PGChildNodeView):
'configs': [{'get': 'configs'}]
})
# Schema Diff: Keys to ignore while comparing
keys_to_ignore = ['oid', 'schema', 'definition']
def check_precondition(f):
"""
This function will behave as a decorator which will check the
@ -197,6 +205,12 @@ class RuleView(PGChildNodeView):
]['datlastsysoid'] if self.manager.db_info is not None and \
kwargs['did'] in self.manager.db_info else 0
self.template_path = 'rules/sql'
self.table_template_path = compile_template_path(
'tables/sql',
self.manager.server_type,
self.manager.version
)
return f(*args, **kwargs)
return wrap
@ -278,6 +292,21 @@ class RuleView(PGChildNodeView):
"""
Fetch the properties of an individual rule and render in properties tab
"""
status, data = self._fetch_properties(rid)
if not status:
return data
return ajax_response(
response=data,
status=200
)
def _fetch_properties(self, rid):
"""
This function is used to fetch the properties of the specified object
:param rid:
:return:
"""
SQL = render_template("/".join(
[self.template_path, 'properties.sql']
@ -285,15 +314,13 @@ class RuleView(PGChildNodeView):
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext("""Could not find the rule in the table."""))
return False, gone(
gettext("""Could not find the rule in the table."""))
return ajax_response(
response=parse_rule_definition(res),
status=200
)
return True, parse_rule_definition(res)
@check_precondition
def create(self, gid, sid, did, scid, tid):
@ -369,7 +396,7 @@ class RuleView(PGChildNodeView):
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, tid, rid=None):
def delete(self, gid, sid, did, scid, tid, rid=None, only_sql=False):
"""
This function will drop a rule object
"""
@ -412,6 +439,8 @@ class RuleView(PGChildNodeView):
nspname=rset['nspname'],
cascade=cascade
)
if only_sql:
return SQL
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -489,6 +518,44 @@ class RuleView(PGChildNodeView):
[self.template_path, 'create.sql']), data=data)
return SQL, data['name'] if 'name' in data else old_data['name']
@check_precondition
def get_sql_from_diff(self, gid, sid, did, scid, tid, oid, data=None,
diff_schema=None, drop_sql=False):
if drop_sql:
SQL = self.delete(gid=gid, sid=sid, did=did,
scid=scid, tid=tid,
rid=oid, only_sql=True)
else:
SQL = render_template("/".join(
[self.template_path, 'properties.sql']), rid=oid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
gettext("""Could not find the rule in the table.""")
)
res_data = parse_rule_definition(res)
SQL = ''
if data:
old_data = res_data
SQL = render_template(
"/".join([self.template_path, 'update.sql']),
data=data, o_data=old_data
)
else:
if diff_schema:
res_data['schema'] = diff_schema
SQL = render_template("/".join(
[self.template_path, 'create.sql']),
data=res_data, display_comments=True)
return SQL
@check_precondition
def dependents(self, gid, sid, did, scid, tid, rid):
"""
@ -527,5 +594,47 @@ class RuleView(PGChildNodeView):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
ignore_keys=False):
"""
This function will fetch the list of all the rules for
the specified table id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:param tid: Table Id
:return:
"""
res = {}
if oid:
status, data = self._fetch_properties(oid)
if not status:
current_app.logger.error(data)
return False
res = data
else:
SQL = render_template("/".join([self.template_path,
'nodes.sql']),
tid=tid)
status, rules = self.conn.execute_2darray(SQL)
if not status:
current_app.logger.error(rules)
return False
for row in rules['rows']:
status, data = self._fetch_properties(row['oid'])
if status:
if ignore_keys:
for key in self.keys_to_ignore:
if key in data:
del data[key]
res[row['name']] = data
return res
SchemaDiffRegistry(blueprint.node_type, RuleView, 'table')
RuleView.register_node_view(blueprint)

View File

@ -0,0 +1,507 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
""" Implements Utility class for Table and Partitioned Table. """
import copy
from flask import render_template
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
class SchemaDiffTableCompare(SchemaDiffObjectCompare):
keys_to_ignore = ['oid', 'schema', 'vacuum_table',
'vacuum_toast', 'edit_types', 'attnum', 'col_type',
'references', 'reltuples', 'rows_cnt']
keys_to_ignore_ddl_comp = ['oid',
'schema',
'columns',
'edit_types',
'primary_key',
'exclude_constraint',
'check_constraint',
'foreign_key',
'reltuples',
'rows_cnt'
]
keys_to_remove = {
'columns': ['relname', 'nspname', 'parent_tbl', 'attrelid', 'adrelid'],
'primary_key': ['oid'],
'unique_constraint': ['oid'],
'check_constraint': ['oid', 'nspname'],
'foreign_key': ['oid', 'fknsp', 'confrelid'],
'exclude_constraint': ['oid'],
'partitions': ['oid'],
}
keys_to_remove_ddl_comp = {
'columns': ['relname', 'nspname', 'parent_tbl', 'attrelid', 'adrelid'],
'check_constraint': ['nspname'],
'foreign_key': ['fknsp', 'confrelid']
}
def compare(self, **kwargs):
"""
This function is used to compare all the table objects
from two different schemas.
:return: Comparison Dictionary
"""
src_sid = kwargs.get('source_sid')
src_did = kwargs.get('source_did')
src_scid = kwargs.get('source_scid')
tar_sid = kwargs.get('target_sid')
tar_did = kwargs.get('target_did')
tar_scid = kwargs.get('target_scid')
sub_modules = ['index', 'rule', 'trigger']
source_tables = self.fetch_tables(sid=src_sid, did=src_did,
scid=src_scid)
target_tables = self.fetch_tables(sid=tar_sid, did=tar_did,
scid=tar_scid)
if self.manager.version >= 120000:
sub_modules.append('compound_trigger')
# If both the dict have no items then return None.
if not (source_tables or target_tables) or (
len(source_tables) <= 0 and len(target_tables) <= 0):
return None
src_server_type, tar_server_type = self.get_server_type(src_sid,
tar_sid)
for module in sub_modules:
module_view = SchemaDiffRegistry.get_node_view(
module)
# Get sub module data for source tables
if module_view.blueprint.server_type is None or \
src_server_type in module_view.blueprint.server_type:
for key, val in source_tables.items():
source = module_view.fetch_objects_to_compare(
sid=src_sid,
did=src_did,
scid=src_scid,
tid=val['oid'],
oid=None,
ignore_keys=True
)
source_tables[key][module] = source
# Get sub module data for target tables
if module_view.blueprint.server_type is None or \
tar_server_type in module_view.blueprint.server_type:
for key, val in target_tables.items():
target = module_view.fetch_objects_to_compare(
sid=tar_sid,
did=tar_did,
scid=tar_scid,
tid=val['oid'],
oid=None,
ignore_keys=True
)
target_tables[key][module] = target
return compare_dictionaries(source_tables, target_tables,
self.node_type,
self.blueprint.COLLECTION_LABEL,
self.keys_to_ignore)
@staticmethod
def get_server_type(src_id, tar_id):
"""Get server types of source and target servers."""
driver = get_driver(PG_DEFAULT_DRIVER)
src_manager = driver.connection_manager(src_id)
tar_manager = driver.connection_manager(tar_id)
return src_manager.server_type, tar_manager.server_type
def ddl_compare(self, **kwargs):
"""
This function will compare the properties of two tables and
return the source DDL, target DDL and the difference between them.
"""
src_sid = kwargs.get('source_sid')
src_did = kwargs.get('source_did')
src_scid = kwargs.get('source_scid')
src_oid = kwargs.get('source_oid')
tar_sid = kwargs.get('target_sid')
tar_did = kwargs.get('target_did')
tar_scid = kwargs.get('target_scid')
tar_oid = kwargs.get('target_oid')
comp_status = kwargs.get('comp_status')
generate_script = False
if 'generate_script' in kwargs and kwargs['generate_script']:
generate_script = True
source = ''
target = ''
diff = ''
ignore_sub_modules = ['column', 'constraints']
src_server_type, tar_server_type = self.get_server_type(src_sid,
tar_sid)
status, target_schema = self.get_schema(tar_sid,
tar_did,
tar_scid
)
if not status:
return internal_server_error(errormsg=target_schema)
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
if not generate_script:
source = self.get_sql_from_table_diff(sid=src_sid,
did=src_did,
scid=src_scid,
tid=src_oid,
json_resp=False)
diff = self.get_sql_from_table_diff(sid=src_sid, did=src_did,
scid=src_scid, tid=src_oid,
diff_schema=target_schema,
json_resp=False)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
if not generate_script:
target = self.get_sql_from_table_diff(sid=tar_sid,
did=tar_did,
scid=tar_scid,
tid=tar_oid,
json_resp=False)
SQL = render_template(
"/".join([self.table_template_path, 'properties.sql']),
did=tar_did, scid=tar_scid, tid=tar_oid,
datlastsysoid=self.datlastsysoid
)
status, res = self.conn.execute_dict(SQL)
if status:
diff = self.get_delete_sql(res)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['different']:
source = self.fetch_tables(
sid=src_sid, did=src_did,
scid=src_scid, tid=src_oid,
keys_to_remove=self.keys_to_remove_ddl_comp
)
target = self.fetch_tables(
sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_oid,
keys_to_remove=self.keys_to_remove_ddl_comp
)
if self.manager.version < 100000:
ignore_sub_modules.append('partition')
if self.manager.version < 120000:
ignore_sub_modules.append('compound_trigger')
# In case of error return None
if not (source or target):
return None
diff_dict = directory_diff(
source, target, ignore_keys=self.keys_to_ignore_ddl_comp,
difference={}
)
# Column comparison
col_diff = self.table_col_ddl_comp(source, target)
diff_dict.update(col_diff)
# Constraint comparison
pk_diff = self.constraint_ddl_comp(source, target)
diff_dict.update(pk_diff)
diff_dict['relacl'] = self.parce_acl(source, target)
if not generate_script:
source = self.get_sql_from_table_diff(sid=src_sid,
did=src_did,
scid=src_scid,
tid=src_oid,
json_resp=False)
target = self.get_sql_from_table_diff(sid=tar_sid,
did=tar_did,
scid=tar_scid,
tid=tar_oid,
json_resp=False)
diff = self.get_sql_from_table_diff(sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_oid,
diff_data=diff_dict,
json_resp=False)
for module in self.blueprint.submodules:
if module.NODE_TYPE not in ignore_sub_modules:
module_view = SchemaDiffRegistry.get_node_view(
module.NODE_TYPE)
if module_view.blueprint.server_type and (
src_server_type not in
module_view.blueprint.server_type and
tar_server_type not in
module_view.blueprint.server_type
):
continue
if module_view.blueprint.server_type and (
(src_server_type in
module_view.blueprint.server_type and
tar_server_type not in
module_view.blueprint.server_type) or (
src_server_type not in
module_view.blueprint.server_type and
tar_server_type in
module_view.blueprint.server_type)
):
continue
result = module_view.compare(
source_sid=src_sid, source_did=src_did,
source_scid=src_scid, source_tid=src_oid,
target_sid=tar_sid, target_did=tar_did,
target_scid=tar_scid, target_tid=tar_oid
)
if result and module.NODE_TYPE != 'partition':
child_diff = ''
for res in result:
if res['status'] == \
SchemaDiffModel.COMPARISON_STATUS[
'different']:
source_oid = res['source_oid']
target_oid = res['target_oid']
else:
source_oid = res['oid']
target_oid = res['oid']
if res['status'] != \
SchemaDiffModel.COMPARISON_STATUS[
'identical']:
child_diff = module_view.ddl_compare(
source_sid=src_sid, source_did=src_did,
source_scid=src_scid,
source_oid=source_oid,
source_tid=src_oid, target_sid=tar_sid,
target_did=tar_did, target_scid=tar_scid,
target_tid=tar_oid, target_oid=target_oid,
comp_status=res['status']
)
if child_diff:
diff += child_diff
elif result:
# For partition module
identical = False
source_only = False
target_only = False
different = False
for res in result:
if res['status'] == \
SchemaDiffModel.COMPARISON_STATUS[
'identical']:
identical = True
elif res['status'] == \
SchemaDiffModel.COMPARISON_STATUS[
'source_only']:
source_only = True
elif res['status'] == \
SchemaDiffModel.COMPARISON_STATUS[
'target_only']:
target_only = True
else:
different = True
if identical:
pass
elif (source_only or target_only) and not different:
for res in result:
source_oid = res['oid']
target_oid = res['oid']
child_diff = module_view.ddl_compare(
source_sid=src_sid, source_did=src_did,
source_scid=src_scid,
source_oid=source_oid,
source_tid=src_oid, target_sid=tar_sid,
target_did=tar_did, target_scid=tar_scid,
target_tid=tar_oid, target_oid=target_oid,
comp_status=res['status']
)
if child_diff:
diff += child_diff
else:
diff = self.get_sql_from_table_diff(
sid=src_sid,
did=src_did,
scid=src_scid,
tid=src_oid,
diff_schema=target_schema,
json_resp=False,
schema_diff_table=True
)
else:
source = self.get_sql_from_table_diff(sid=src_sid, did=src_did,
scid=src_scid, tid=src_oid,
json_resp=False)
target = self.get_sql_from_table_diff(sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_oid,
json_resp=False)
return {'source_ddl': source,
'target_ddl': target,
'diff_ddl': diff
}
@staticmethod
def table_col_ddl_comp(source, target):
"""
Table Column comparison
:param source: Source columns
:param target: Target columns
:return: Difference of the columns
"""
source_cols = source['columns']
target_cols = copy.deepcopy(target['columns'])
added = []
updated = []
different = {'columns': {}}
for source in source_cols:
if 'name' in source:
if type(target_cols) is list and len(
target_cols) > 0:
tmp = None
for item in target_cols:
if item['name'] == source['name']:
tmp = copy.deepcopy(item)
if tmp and source != tmp:
tmp_updated = copy.deepcopy(source)
# Preserve the column number
tmp_updated['attnum'] = tmp['attnum']
if item['typname'] not in tmp_updated['edit_types']:
tmp_updated['col_type_conversion'] = False
updated.append(tmp_updated)
target_cols.remove(tmp)
elif tmp and source == tmp:
target_cols.remove(tmp)
elif tmp is None:
added.append(source)
else:
added.append(source)
different['columns']['added'] = added
different['columns']['changed'] = updated
if target_cols and len(target_cols) > 0:
different['columns']['deleted'] = target_cols
return different
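
Editor's note: a small illustration, not part of this module, of the dataset shape table_col_ddl_comp() works on. Each column dict carries at least name, attnum, typname and edit_types, and the result groups columns into added / changed / deleted buckets (the class is assumed to be importable from this new utility module).

source_table = {'columns': [
    {'name': 'id', 'attnum': 1, 'typname': 'bigint',
     'edit_types': ['bigint', 'numeric']},
    {'name': 'note', 'attnum': 2, 'typname': 'text', 'edit_types': ['text']},
]}
target_table = {'columns': [
    {'name': 'id', 'attnum': 1, 'typname': 'integer',
     'edit_types': ['bigint', 'integer']},
]}

# Expected classification: 'id' lands in 'changed', 'note' in 'added',
# and nothing in 'deleted'.
diff = SchemaDiffTableCompare.table_col_ddl_comp(source_table, target_table)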
@staticmethod
def constraint_ddl_comp(source_table, target_table):
"""
Table Constraint DDL comparison
:param source_table: Source Table
:param target_table: Target Table
:return: Difference of constraints
"""
different = {}
non_editable_keys = {}
non_editable_keys = {'primary_key': ['col_count',
'condeferrable',
'condeffered',
'columns'],
'check_constraint': ['consrc'],
'exclude_constraint': ['amname',
'indconstraint',
'columns']
}
for constraint in ['primary_key', 'check_constraint',
'exclude_constraint']:
source_cols = source_table[constraint] if \
constraint in source_table else []
target_cols = copy.deepcopy(target_table[constraint]) if\
constraint in target_table else []
added = []
updated = []
deleted = []
different[constraint] = {}
for source in source_cols:
if 'name' in source:
if type(target_cols) is list and len(
target_cols) > 0:
tmp_src = copy.deepcopy(source)
tmp_src.pop('oid')
tmp_tar = None
tmp = None
for item in target_cols:
if item['name'] == source['name']:
tmp_tar = copy.deepcopy(item)
tmp = copy.deepcopy(item)
tmp_tar.pop('oid')
if tmp_tar and tmp_src != tmp_tar:
tmp_updated = copy.deepcopy(source)
for key in non_editable_keys[constraint]:
if key in tmp_updated and \
tmp_updated[key] != tmp_tar[key]:
added.append(source)
deleted.append(tmp_updated)
tmp_updated = None
break
if tmp_updated:
tmp_updated['oid'] = tmp_tar['oid']
updated.append(tmp_updated)
target_cols.remove(tmp)
elif tmp_tar and tmp_src == tmp_tar:
target_cols.remove(tmp)
elif tmp_tar is None:
added.append(source)
else:
added.append(source)
different[constraint]['added'] = added
different[constraint]['changed'] = updated
different[constraint]['deleted'] = deleted
if target_cols and len(target_cols) > 0:
different[constraint]['deleted'] = target_cols
return different
def remove_keys_for_comparision(self, data, keys=None):
"""
This function is used to remove specific keys from data
"""
keys_to_remove = keys if keys else self.keys_to_remove
for p_key, p_val in keys_to_remove.items():
if p_key in data and data[p_key] is not None \
and len(data[p_key]) > 0:
for item in data[p_key]:
# Remove keys that should not be part of the comparison.
for key in p_val:
if key in item:
item.pop(key)
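
Editor's note: a standalone illustration, not from the commit, of what remove_keys_for_comparision() does to the nested dataset: for each collection named in keys_to_remove, the listed keys are popped from every item before the dictionaries are compared. The sample values are placeholders.

keys_to_remove = {'columns': ['attrelid', 'adrelid'],
                  'primary_key': ['oid']}
data = {
    'columns': [{'name': 'id', 'attrelid': 16384, 'adrelid': 0}],
    'primary_key': [{'name': 'tbl_pkey', 'oid': 16390}],
}

for p_key, p_val in keys_to_remove.items():
    if data.get(p_key):
        for item in data[p_key]:
            for key in p_val:
                item.pop(key, None)

# data is now {'columns': [{'name': 'id'}],
#              'primary_key': [{'name': 'tbl_pkey'}]}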

View File

@ -21,7 +21,7 @@ CACHE {{data.seqcache|int}} {% endif %}
{% endif %}{% endif %};
{### Add comments ###}
{% if data and data.description %}
{% if data and data.description and data.description != None %}
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
IS {{data.description|qtLiteral}};

View File

@ -10,8 +10,14 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{% endif %}
{### Alter column type and collation ###}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname)%}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %}
-- WARNING:
-- The SQL statement below would normally be used to alter the datatype for the {{o_data.name}} column, however,
-- the current datatype cannot be cast to the target datatype so this conversion cannot be made automatically.
{% endif %}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %}ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %} ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
COLLATE {{data.collspcname}}{% elif o_data.collspcname %} COLLATE {{o_data.collspcname}}{% endif %};
{% endif %}
{### Alter column default value ###}
@ -95,7 +101,7 @@ COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, o_data.name)}}
{% endif %}
{### Update column variables ###}
{% if 'attoptions' in data and data.attoptions|length > 0 %}
{% if 'attoptions' in data and data.attoptions and data.attoptions|length > 0 %}
{% set variables = data.attoptions %}
{% if 'deleted' in variables and variables.deleted|length > 0 %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}

View File

@ -21,7 +21,7 @@ CACHE {{data.seqcache|int}} {% endif %}
{% endif %}{% endif %}{% if data.colconstype == 'g' and data.genexpr and data.genexpr != '' %} GENERATED ALWAYS AS ({{data.genexpr}}) STORED{% endif %};
{### Add comments ###}
{% if data and data.description %}
{% if data and data.description and data.description != None %}
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
IS {{data.description|qtLiteral}};

View File

@ -6,6 +6,12 @@
{% if data.name and data.name != o_data.name %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
RENAME {{conn|qtIdent(o_data.name)}} TO {{conn|qtIdent(data.name)}};
{% endif %}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %}
-- WARNING:
-- The SQL statement below would normally be used to alter the datatype for the {{o_data.name}} column, however,
-- the current datatype cannot be cast to the target datatype so this conversion cannot be made automatically.
{% endif %}
{### Alter column type and collation ###}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname)%}
@ -45,7 +51,7 @@ PLAIN{% elif data.attstorage == 'm'%}MAIN{% elif data.attstorage == 'e'%}
EXTERNAL{% elif data.attstorage == 'x'%}EXTENDED{% endif %};
{% endif %}
{% if data.description is defined %}
{% if data.description is defined and data.description != None %}
{% if data.name %}
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
{% else %}
@ -55,7 +61,7 @@ COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, o_data.name)}}
{% endif %}
{### Update column variables ###}
{% if 'attoptions' in data and data.attoptions|length > 0 %}
{% if 'attoptions' in data and data.attoptions != None and data.attoptions|length > 0 %}
{% set variables = data.attoptions %}
{% if 'deleted' in variables and variables.deleted|length > 0 %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}

View File

@ -12,7 +12,7 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{% endif %}
{### Add comments ###}
{% if data and data.description %}
{% if data and data.description and data.description != None %}
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
IS {{data.description|qtLiteral}};

View File

@ -9,9 +9,15 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{% endif %}
{### Alter column type and collation ###}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname) %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname)%}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %}
-- WARNING:
-- The SQL statement below would normally be used to alter the datatype for the {{o_data.name}} column, however,
-- the current datatype cannot be cast to the target datatype so this conversion cannot be made automatically.
{% endif %}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %}ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %} ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
COLLATE {{data.collspcname}}{% elif o_data.collspcname %} COLLATE {{o_data.collspcname}}{% endif %};
{% endif %}
{### Alter column default value ###}
@ -46,7 +52,7 @@ PLAIN{% elif data.attstorage == 'm'%}MAIN{% elif data.attstorage == 'e'%}
EXTERNAL{% elif data.attstorage == 'x'%}EXTENDED{% endif %};
{% endif %}
{% if data.description is defined %}
{% if data.description is defined and data.description != None %}
{% if data.name %}
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
{% else %}

View File

@ -3,6 +3,7 @@
{#====== MAIN TABLE TEMPLATE STARTS HERE ======#}
{#===========================================#}
{### CREATE TABLE STATEMENT FOR partitions ###}
CREATE {% if data.relpersistence %}UNLOGGED {% endif %}TABLE {{conn|qtIdent(data.schema, data.name)}}{% if data.relispartition is defined and data.relispartition %} PARTITION OF {{conn|qtIdent(data.parent_schema, data.partitioned_table_name)}}{% endif %}
{# Macro to render for constraints #}

View File

@ -0,0 +1,14 @@
INSERT INTO {{conn|qtIdent(data.schema, data.name)}}(
{% if data.columns and data.columns|length > 0 %}
{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %})
SELECT {% if data.columns and data.columns|length > 0 %}{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %}
FROM {{conn|qtIdent(data.schema, data.orig_name)}};
DROP TABLE {{conn|qtIdent(data.schema, data.orig_name)}};
{{partition_sql}}
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
RENAME TO {{conn|qtIdent(data.orig_name)}};
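
Editor's note: an illustrative rendering of the new "move data" template above, for a hypothetical public.orders table with a temporary name of orders_migrated; the real identifiers come from qtIdent and any ATTACH PARTITION statements are substituted through {{partition_sql}}.

expected_sql = """
INSERT INTO public.orders_migrated(id, amount)
SELECT id, amount
FROM public.orders;

DROP TABLE public.orders;

-- {{partition_sql}} (ATTACH PARTITION statements, if any) would appear here

ALTER TABLE public.orders_migrated
    RENAME TO orders;
"""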

View File

@ -13,7 +13,7 @@ import simplejson as json
from functools import wraps
import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, request, jsonify
from flask import render_template, request, jsonify, current_app
from flask_babelex import gettext
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.browser.utils import PGChildNodeView
@ -25,7 +25,11 @@ from pgadmin.browser.server_groups.servers.databases.schemas.utils \
import trigger_definition
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils import IS_PY2
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
unicode = str
@ -151,7 +155,7 @@ class TriggerModule(CollectionNodeModule):
blueprint = TriggerModule(__name__)
class TriggerView(PGChildNodeView):
class TriggerView(PGChildNodeView, SchemaDiffObjectCompare):
"""
This class is responsible for generating routes for Trigger node
@ -244,6 +248,10 @@ class TriggerView(PGChildNodeView):
'enable': [{'put': 'enable_disable_trigger'}]
})
# Schema Diff: Keys to ignore while comparing
keys_to_ignore = ['oid', 'xmin', 'nspname', 'tfunction',
'tgrelid', 'tgfoid', 'prosrc']
def check_precondition(f):
"""
This function will behave as a decorator which will check
@ -267,6 +275,11 @@ class TriggerView(PGChildNodeView):
kwargs['did'] in self.manager.db_info else 0
# we will set template path for sql scripts
self.table_template_path = compile_template_path(
'tables/sql',
self.manager.server_type,
self.manager.version
)
self.template_path = 'triggers/sql/{0}/#{1}#'.format(
self.manager.server_type, self.manager.version)
# Store server type
@ -450,7 +463,22 @@ class TriggerView(PGChildNodeView):
Returns:
JSON of selected trigger node
"""
status, data = self._fetch_properties(tid, trid)
if not status:
return data
return ajax_response(
response=data,
status=200
)
def _fetch_properties(self, tid, trid):
"""
This function is used to fetch the properties of the specified object
:param tid:
:param trid:
:return:
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, trid=trid,
@ -459,10 +487,10 @@ class TriggerView(PGChildNodeView):
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
return False, gone(
gettext("""Could not find the trigger in the table."""))
# Making copy of output for future use
@ -472,10 +500,7 @@ class TriggerView(PGChildNodeView):
data = trigger_definition(data)
return ajax_response(
response=data,
status=200
)
return True, data
@check_precondition
def create(self, gid, sid, did, scid, tid):
@ -552,7 +577,7 @@ class TriggerView(PGChildNodeView):
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, tid, trid=None):
def delete(self, gid, sid, did, scid, tid, trid=None, only_sql=False):
"""
This function will delete the existing trigger object
@ -610,6 +635,8 @@ class TriggerView(PGChildNodeView):
conn=self.conn,
cascade=cascade
)
if only_sql:
return SQL
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -761,6 +788,36 @@ class TriggerView(PGChildNodeView):
return ajax_response(response=SQL)
@check_precondition
def get_sql_from_diff(self, gid, sid, did, scid, tid, oid,
data=None, diff_schema=None, drop_sql=False):
if data:
SQL, name = trigger_utils.get_sql(
self.conn, data, tid, oid,
self.datlastsysoid,
self.blueprint.show_system_objects)
if not isinstance(SQL, (str, unicode)):
return SQL
SQL = SQL.strip('\n').strip(' ')
else:
if drop_sql:
SQL = self.delete(gid=gid, sid=sid, did=did,
scid=scid, tid=tid, trid=oid,
only_sql=True)
else:
schema = self.schema
if diff_schema:
schema = diff_schema
SQL = trigger_utils.get_reverse_engineered_sql(
self.conn, schema,
self.table, tid, oid,
self.datlastsysoid,
self.blueprint.show_system_objects,
template_path=None, with_header=False)
return SQL
@check_precondition
def enable_disable_trigger(self, gid, sid, did, scid, tid, trid):
"""
@ -875,5 +932,46 @@ class TriggerView(PGChildNodeView):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
ignore_keys=False):
"""
This function will fetch the list of all the triggers for
the specified table id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:param tid: Table Id
:return:
"""
res = dict()
if oid:
status, data = self._fetch_properties(tid, oid)
if not status:
current_app.logger.error(data)
return False
res = data
else:
SQL = render_template("/".join([self.template_path,
'nodes.sql']), tid=tid)
status, triggers = self.conn.execute_2darray(SQL)
if not status:
current_app.logger.error(triggers)
return False
for row in triggers['rows']:
status, data = self._fetch_properties(tid, row['oid'])
if status:
if ignore_keys:
for key in self.keys_to_ignore:
if key in data:
del data[key]
res[row['name']] = data
return res
SchemaDiffRegistry(blueprint.node_type, TriggerView, 'table')
TriggerView.register_node_view(blueprint)

View File

@ -30,7 +30,7 @@ def get_template_path(f):
def wrap(*args, **kwargs):
# Here args[0] will hold the connection object
conn_obj = args[0]
if 'template_path' not in kwargs:
if 'template_path' not in kwargs or kwargs['template_path'] is None:
kwargs['template_path'] = 'triggers/sql/{0}/#{1}#'.format(
conn_obj.manager.server_type, conn_obj.manager.version)
@ -201,7 +201,7 @@ def get_sql(conn, data, tid, trid, datlastsysoid,
@get_template_path
def get_reverse_engineered_sql(conn, schema, table, tid, trid,
datlastsysoid, show_system_objects,
template_path=None):
template_path=None, with_header=True):
"""
This function will return reverse engineered sql for specified trigger.
@ -213,6 +213,8 @@ def get_reverse_engineered_sql(conn, schema, table, tid, trid,
:param datlastsysoid:
:param show_system_objects: Show System Object value True or False
:param template_path: Optional template path
:param with_header: Optional parameter to decide whether the SQL will be
returned with header or not
:return:
"""
SQL = render_template("/".join([template_path, 'properties.sql']),
@ -240,12 +242,15 @@ def get_reverse_engineered_sql(conn, schema, table, tid, trid,
SQL, name = get_sql(conn, data, tid, None, datlastsysoid,
show_system_objects)
sql_header = u"-- Trigger: {0}\n\n-- ".format(data['name'])
if with_header:
sql_header = u"-- Trigger: {0}\n\n-- ".format(data['name'])
sql_header += render_template("/".join([template_path, 'delete.sql']),
data=data, conn=conn)
sql_header += render_template("/".join([template_path, 'delete.sql']),
data=data, conn=conn)
SQL = sql_header + '\n\n' + SQL.strip('\n')
SQL = sql_header + '\n\n' + SQL.strip('\n')
else:
SQL = SQL.strip('\n')
# If trigger is disabled then add sql code for the same
if data['is_enable_trigger'] != 'O':

View File

@ -10,6 +10,7 @@
""" Implements Utility class for Table and Partitioned Table. """
import re
import copy
from functools import wraps
import simplejson as json
from flask import render_template, jsonify, request
@ -179,8 +180,10 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
data[row['deftype']] = [priv]
# We will add Auto vacuum defaults with out result for grid
data['vacuum_table'] = self.parse_vacuum_data(self.conn, data, 'table')
data['vacuum_toast'] = self.parse_vacuum_data(self.conn, data, 'toast')
data['vacuum_table'] = copy.deepcopy(
self.parse_vacuum_data(self.conn, data, 'table'))
data['vacuum_toast'] = copy.deepcopy(
self.parse_vacuum_data(self.conn, data, 'toast'))
# Fetch columns for the table logic
#
@ -405,7 +408,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
status=200
)
def get_reverse_engineered_sql(self, did, scid, tid, main_sql, data):
def get_reverse_engineered_sql(self, did, scid, tid, main_sql, data,
json_resp=True, diff_partition_sql=False):
"""
This function will creates reverse engineered sql for
the table object
@ -416,6 +420,9 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
tid: Table ID
main_sql: List contains all the reversed engineered sql
data: Table's Data
json_resp: Json response or plain SQL
diff_partition_sql: In Schema diff, the Partition sql should be
return separately to perform further task
"""
"""
#####################################
@ -427,6 +434,7 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
schema = data['schema']
table = data['name']
is_partitioned = 'is_partitioned' in data and data['is_partitioned']
sql_header = ''
data = self._formatter(did, scid, tid, data)
@ -444,18 +452,20 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
c['cltype'], c['hasSqrBracket'] = \
column_utils.type_formatter(c['cltype'])
sql_header = u"-- Table: {0}\n\n-- ".format(
self.qtIdent(self.conn, data['schema'], data['name']))
if json_resp:
sql_header = u"-- Table: {0}\n\n-- ".format(
self.qtIdent(self.conn, data['schema'], data['name']))
sql_header += render_template("/".join([self.table_template_path,
'delete.sql']),
data=data, conn=self.conn)
sql_header += render_template("/".join([self.table_template_path,
'delete.sql']),
data=data, conn=self.conn)
sql_header = sql_header.strip('\n')
sql_header += '\n'
sql_header = sql_header.strip('\n')
sql_header += '\n'
# Add into main sql
main_sql.append(sql_header)
# Add into main sql
main_sql.append(sql_header)
partition_main_sql = ""
# Parse privilege data
if 'relacl' in data:
@ -493,12 +503,14 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
for row in rset['rows']:
index_sql = index_utils.get_reverse_engineered_sql(
self.conn, schema, table, did, tid, row['oid'],
self.datlastsysoid)
self.datlastsysoid,
template_path=None, with_header=json_resp)
index_sql = u"\n" + index_sql
# Add into main sql
index_sql = re.sub('\n{2,}', '\n\n', index_sql)
main_sql.append(index_sql)
main_sql.append(index_sql.strip('\n'))
"""
########################################
@ -514,7 +526,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
for row in rset['rows']:
trigger_sql = trigger_utils.get_reverse_engineered_sql(
self.conn, schema, table, tid, row['oid'],
self.datlastsysoid, self.blueprint.show_system_objects)
self.datlastsysoid, self.blueprint.show_system_objects,
template_path=None, with_header=json_resp)
trigger_sql = u"\n" + trigger_sql
# Add into main sql
@ -571,10 +584,13 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
if not status:
return internal_server_error(errormsg=res)
display_comments = True
if not json_resp:
display_comments = False
res_data = parse_rule_definition(res)
rules_sql += render_template("/".join(
[self.rules_template_path, 'create.sql']),
data=res_data, display_comments=True)
data=res_data, display_comments=display_comments)
# Add into main sql
rules_sql = re.sub('\n{2,}', '\n\n', rules_sql)
@ -594,13 +610,17 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
return internal_server_error(errormsg=rset)
if len(rset['rows']):
sql_header = u"\n-- Partitions SQL"
if json_resp:
sql_header = u"\n-- Partitions SQL"
partition_sql = ''
for row in rset['rows']:
part_data = dict()
part_data['partitioned_table_name'] = table
part_data['parent_schema'] = schema
part_data['schema'] = row['schema_name']
part_data['partitioned_table_name'] = data['name']
part_data['parent_schema'] = data['schema']
if not json_resp:
part_data['schema'] = data['schema']
else:
part_data['schema'] = row['schema_name']
part_data['relispartition'] = True
part_data['name'] = row['name']
part_data['partition_value'] = row['partition_value']
@ -612,13 +632,18 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
data=part_data, conn=self.conn)
# Add into main sql
partition_sql = re.sub('\n{2,}', '\n\n', partition_sql)
main_sql.append(
sql_header + '\n\n' + partition_sql.strip('\n')
)
partition_sql = re.sub('\n{2,}', '\n\n', partition_sql
).strip('\n')
partition_main_sql = partition_sql.strip('\n')
if not diff_partition_sql:
main_sql.append(
sql_header + '\n\n' + partition_main_sql
)
sql = '\n'.join(main_sql)
if not json_resp:
return sql, partition_main_sql
return ajax_response(response=sql.strip('\n'))
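The json_resp and diff_partition_sql flags added above change what the generator hands back: the SQL tab still gets a single wrapped response, while the Schema Diff receives the raw SQL with the partition SQL as a separate string. A stripped-down sketch of that return-shape switch (hypothetical helper, not pgAdmin code):

    # Hypothetical helper illustrating the json_resp / diff_partition_sql
    # switch above: one builder, two return shapes.
    def build_table_sql(main_sql, partition_sql='',
                        json_resp=True, diff_partition_sql=False):
        if partition_sql and not diff_partition_sql:
            # Default behaviour: partitions are appended to the main script.
            main_sql.append('-- Partitions SQL\n\n' + partition_sql)
        sql = '\n'.join(main_sql)
        if not json_resp:
            # Schema diff consumes the raw strings and handles partitions itself.
            return sql, partition_sql
        return {'response': sql.strip('\n')}   # stands in for ajax_response()


    if __name__ == '__main__':
        parts = ['CREATE TABLE public.t1 (id int) PARTITION BY RANGE (id);']
        print(build_table_sql(list(parts), 'CREATE TABLE public.t1_p1 ...;',
                              json_resp=False, diff_partition_sql=True))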
def reset_statistics(self, scid, tid):
@ -907,7 +932,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
conn=self.conn).strip('\n') + '\n\n'
# If partition(s) is/are added
if 'added' in partitions:
if 'added' in partitions and 'partition_scheme' in old_data\
and old_data['partition_scheme'] != '':
temp_data = dict()
temp_data['schema'] = data['schema']
temp_data['name'] = data['name']
@ -1133,7 +1159,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
except Exception as e:
return internal_server_error(errormsg=str(e))
def properties(self, gid, sid, did, scid, tid, res):
def properties(self, gid, sid, did, scid, tid, res,
return_ajax_response=True):
"""
This function will show the properties of the selected table node.
@ -1145,6 +1172,7 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
scid: Schema ID
tid: Table ID
res: Table/Partition table properties
return_ajax_response: If True then return the ajax response
Returns:
JSON of selected table node
@ -1242,6 +1270,9 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
data['partitions'] = partitions
if not return_ajax_response:
return data
return ajax_response(
response=data,
status=200
@ -1359,6 +1390,22 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
}
)
def get_delete_sql(self, res):
# Below will decide if it's simple drop or drop with cascade call
if self.cmd == 'delete':
# This is a cascade operation
cascade = True
else:
cascade = False
data = res['rows'][0]
return render_template(
"/".join([self.table_template_path, 'delete.sql']),
data=data, cascade=cascade,
conn=self.conn
)
def delete(self, gid, sid, did, scid, tid, res):
"""
This function will delete the table object
@ -1371,20 +1418,8 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
tid: Table ID
"""
# Below will decide if it's simple drop or drop with cascade call
if self.cmd == 'delete':
# This is a cascade operation
cascade = True
else:
cascade = False
SQL = self.get_delete_sql(res)
data = res['rows'][0]
SQL = render_template(
"/".join([self.table_template_path, 'delete.sql']),
data=data, cascade=cascade,
conn=self.conn
)
status, res = self.conn.execute_scalar(SQL)
if not status:
return status, res

View File

@ -26,6 +26,8 @@ from pgadmin.utils import IS_PY2
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
# If we are in Python3
if not IS_PY2:
@ -94,7 +96,7 @@ class TypeModule(SchemaChildModule):
blueprint = TypeModule(__name__)
class TypeView(PGChildNodeView, DataTypeReader):
class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
"""
This class is responsible for generating routes for Type node
@ -173,6 +175,10 @@ class TypeView(PGChildNodeView, DataTypeReader):
* get_external_functions_list(gid, sid, did, scid, tid):
- This function will return list of external functions
in ajax response
* compare(**kwargs):
- This function will compare the type nodes from two
different schemas.
"""
node_type = blueprint.node_type
@ -559,6 +565,22 @@ class TypeView(PGChildNodeView, DataTypeReader):
Returns:
JSON of selected type node
"""
status, res = self._fetch_properties(scid, tid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, tid):
"""
This function is used to fetch the properties of the specified object.
:param scid:
:param tid:
:return:
"""
SQL = render_template(
"/".join([self.template_path,
@ -569,10 +591,10 @@ class TypeView(PGChildNodeView, DataTypeReader):
)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
return False, gone(
gettext("""Could not find the type in the database."""))
# Making copy of output for future use
@ -583,7 +605,7 @@ class TypeView(PGChildNodeView, DataTypeReader):
scid=scid, tid=tid)
status, acl = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=acl)
return False, internal_server_error(errormsg=acl)
# We will set get privileges from acl sql so we don't need
# it from properties sql
@ -599,10 +621,7 @@ class TypeView(PGChildNodeView, DataTypeReader):
# Calling function to check and additional properties if available
copy_dict.update(self.additional_properties(copy_dict, tid))
return ajax_response(
response=copy_dict,
status=200
)
return True, copy_dict
@check_precondition
def get_collations(self, gid, sid, did, scid, tid=None):
@ -1428,5 +1447,31 @@ class TypeView(PGChildNodeView, DataTypeReader):
status=200
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid):
"""
This function will fetch the list of all the types for
the specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'nodes.sql']),
scid=scid, datlastsysoid=self.datlastsysoid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
status, data = self._fetch_properties(scid, row['oid'])
if status:
res[row['name']] = data
return res
TypeView.register_node_view(blueprint)
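The properties/_fetch_properties split above repeats across every node touched by this commit: the HTTP handler keeps returning an ajax response, while the schema diff reuses the raw dictionary through the (status, payload) helper. A stripped-down sketch of the pattern (hypothetical names, not pgAdmin code):

    # Hypothetical sketch of the (status, payload) split used above.
    def _fetch_properties(oid, rows):
        if oid not in rows:
            return False, {'error': 'Could not find the object.'}
        return True, dict(rows[oid])


    def properties(oid, rows):
        status, res = _fetch_properties(oid, rows)
        if not status:
            return res          # error response goes straight back to the client
        return {'response': res, 'status': 200}


    if __name__ == '__main__':
        rows = {16385: {'name': 'public.my_type', 'oid': 16385}}
        print(properties(16385, rows))
        ok, data = _fetch_properties(16385, rows)   # schema diff path
        print(ok, data)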

View File

@ -15,6 +15,8 @@ from flask import render_template
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.utils.ajax import internal_server_error
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
class SchemaChildModule(CollectionNodeModule):
@ -485,11 +487,50 @@ class VacuumSettings:
* type - table/toast vacuum type
"""
vacuum_settings = dict()
def __init__(self):
pass
def get_vacuum_table_settings(self, conn):
def fetch_default_vacuum_settings(self, conn, sid, setting_type):
"""
This function is used to fetch and cache the default vacuum settings
for the specified server id.
:param conn: Connection Object
:param sid: Server ID
:param setting_type: Type (table or toast)
:return:
"""
if sid in VacuumSettings.vacuum_settings:
if setting_type in VacuumSettings.vacuum_settings[sid]:
return VacuumSettings.vacuum_settings[sid][setting_type]
else:
VacuumSettings.vacuum_settings[sid] = dict()
# returns an array of name & label values
vacuum_fields = render_template("vacuum_settings/vacuum_fields.json")
vacuum_fields = json.loads(vacuum_fields)
# returns an array of setting & name values
vacuum_fields_keys = "'" + "','".join(
vacuum_fields[setting_type].keys()) + "'"
SQL = render_template('vacuum_settings/sql/vacuum_defaults.sql',
columns=vacuum_fields_keys)
status, res = conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in res['rows']:
row_name = row['name']
row['name'] = vacuum_fields[setting_type][row_name][0]
row['label'] = vacuum_fields[setting_type][row_name][1]
row['column_type'] = vacuum_fields[setting_type][row_name][2]
VacuumSettings.vacuum_settings[sid][setting_type] = res['rows']
return VacuumSettings.vacuum_settings[sid][setting_type]
def get_vacuum_table_settings(self, conn, sid):
"""
Fetch the default values for autovacuum
fields, return an array of
@ -498,31 +539,9 @@ class VacuumSettings:
- setting
values
"""
return self.fetch_default_vacuum_settings(conn, sid, 'table')
# returns an array of name & label values
vacuum_fields = render_template("vacuum_settings/vacuum_fields.json")
vacuum_fields = json.loads(vacuum_fields)
# returns an array of setting & name values
vacuum_fields_keys = "'" + "','".join(
vacuum_fields['table'].keys()) + "'"
SQL = render_template('vacuum_settings/sql/vacuum_defaults.sql',
columns=vacuum_fields_keys)
status, res = conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in res['rows']:
row_name = row['name']
row['name'] = vacuum_fields['table'][row_name][0]
row['label'] = vacuum_fields['table'][row_name][1]
row['column_type'] = vacuum_fields['table'][row_name][2]
return res
def get_vacuum_toast_settings(self, conn):
def get_vacuum_toast_settings(self, conn, sid):
"""
Fetch the default values for autovacuum
fields, return an array of
@ -531,29 +550,7 @@ class VacuumSettings:
- setting
values
"""
# returns an array of name & label values
vacuum_fields = render_template("vacuum_settings/vacuum_fields.json")
vacuum_fields = json.loads(vacuum_fields)
# returns an array of setting & name values
vacuum_fields_keys = "'" + "','".join(
vacuum_fields['toast'].keys()) + "'"
SQL = render_template('vacuum_settings/sql/vacuum_defaults.sql',
columns=vacuum_fields_keys)
status, res = conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in res['rows']:
row_name = row['name']
row['name'] = vacuum_fields['toast'][row_name][0]
row['label'] = vacuum_fields['toast'][row_name][1]
row['column_type'] = vacuum_fields['table'][row_name][2]
return res
return self.fetch_default_vacuum_settings(conn, sid, 'toast')
def parse_vacuum_data(self, conn, result, type):
"""
@ -567,47 +564,46 @@ class VacuumSettings:
* type - table/toast vacuum type
"""
# returns an array of name & label values
vacuum_fields = render_template("vacuum_settings/vacuum_fields.json")
vacuum_settings_tmp = self.fetch_default_vacuum_settings(
conn, self.manager.sid, type)
vacuum_fields = json.loads(vacuum_fields)
for row in vacuum_settings_tmp:
row_name = row['name']
if type == 'toast':
row_name = 'toast_{0}'.format(row['name'])
if row_name in result and result[row_name] is not None:
if row['column_type'] == 'number':
value = float(result[row_name])
else:
value = int(result[row_name])
row['value'] = value
else:
if 'value' in row:
row.pop('value')
# returns an array of setting & name values
vacuum_fields_keys = "'" + "','".join(
vacuum_fields[type].keys()) + "'"
SQL = render_template('vacuum_settings/sql/vacuum_defaults.sql',
columns=vacuum_fields_keys)
status, res = conn.execute_dict(SQL)
return vacuum_settings_tmp
if not status:
return internal_server_error(errormsg=res)
if type is 'table':
for row in res['rows']:
row_name = row['name']
row['name'] = vacuum_fields[type][row_name][0]
row['label'] = vacuum_fields[type][row_name][1]
row['column_type'] = vacuum_fields[type][row_name][2]
if result[row['name']] is not None:
if row['column_type'] == 'number':
value = float(result[row['name']])
else:
value = int(result[row['name']])
row['value'] = row['setting'] = value
def get_schema(sid, did, scid):
"""
This function will return the schema name.
"""
elif type is 'toast':
for row in res['rows']:
row_old_name = row['name']
row_name = 'toast_{0}'.format(
vacuum_fields[type][row_old_name][0])
row['name'] = vacuum_fields[type][row_old_name][0]
row['label'] = vacuum_fields[type][row_old_name][1]
row['column_type'] = vacuum_fields[type][row_old_name][2]
if result[row_name] and result[row_name] is not None:
if row['column_type'] == 'number':
value = float(result[row_name])
else:
value = int(result[row_name])
row['value'] = row['setting'] = value
driver = get_driver(PG_DEFAULT_DRIVER)
manager = driver.connection_manager(sid)
conn = manager.connection(did=did)
return res['rows']
ver = manager.version
server_type = manager.server_type
# Fetch schema name
status, schema_name = conn.execute_scalar(
render_template("/".join(['schemas',
'{0}/#{1}#'.format(server_type,
ver),
'sql/get_name.sql']),
conn=conn, scid=scid
)
)
return status, schema_name
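fetch_default_vacuum_settings now keeps the defaults in a class-level dictionary keyed by server id and setting type, so the vacuum_defaults.sql query runs once per server and type. A minimal sketch of that memoisation, with a fake loader standing in for the SQL round trip (illustrative names only):

    # Minimal sketch of the per-server cache introduced by
    # fetch_default_vacuum_settings: keyed by server id, then 'table'/'toast',
    # filled lazily on first use.
    class VacuumSettingsCache:
        _cache = {}

        def fetch_defaults(self, sid, setting_type, loader):
            per_server = self._cache.setdefault(sid, {})
            if setting_type not in per_server:
                # loader stands in for the vacuum_defaults.sql query
                per_server[setting_type] = loader(setting_type)
            return per_server[setting_type]


    if __name__ == '__main__':
        calls = []

        def fake_loader(kind):
            calls.append(kind)
            return [{'name': 'autovacuum_enabled', 'setting': 'on'}]

        cache = VacuumSettingsCache()
        cache.fetch_defaults(1, 'table', fake_loader)
        cache.fetch_defaults(1, 'table', fake_loader)   # served from cache
        print(len(calls))   # -> 1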

View File

@ -9,6 +9,7 @@
"""Implements View and Materialized View Node"""
import copy
from functools import wraps
import simplejson as json
@ -18,13 +19,16 @@ from flask_babelex import gettext
import pgadmin.browser.server_groups.servers.databases as databases
from config import PG_DEFAULT_DRIVER
from pgadmin.browser.server_groups.servers.databases.schemas.utils import \
SchemaChildModule, parse_rule_definition, VacuumSettings
SchemaChildModule, parse_rule_definition, VacuumSettings, get_schema
from pgadmin.browser.server_groups.servers.utils import parse_priv_from_db, \
parse_priv_to_db
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
from pgadmin.utils.driver import get_driver
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
"""
This module is responsible for generating two nodes
@ -197,7 +201,7 @@ def check_precondition(f):
return wrap
class ViewNode(PGChildNodeView, VacuumSettings):
class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
"""
This class is responsible for generating routes for view node.
@ -250,6 +254,10 @@ class ViewNode(PGChildNodeView, VacuumSettings):
* dependent(gid, sid, did, scid):
- This function will generate dependent list to show it in dependent
pane for the selected view node.
* compare(**kwargs):
- This function will compare the view nodes from two
different schemas.
"""
node_type = view_blueprint.node_type
@ -290,6 +298,8 @@ class ViewNode(PGChildNodeView, VacuumSettings):
{'get': 'get_toast_table_vacuum'}]
})
keys_to_ignore = ['oid', 'schema', 'xmin']
def __init__(self, *args, **kwargs):
"""
Initialize the variables used by methods of ViewNode.
@ -400,21 +410,37 @@ class ViewNode(PGChildNodeView, VacuumSettings):
Fetches the properties of an individual view
and renders them in the properties tab
"""
status, res = self._fetch_properties(scid, vid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, scid, vid):
"""
This function is used to fetch the properties of the specified object
:param scid:
:param vid:
:return:
"""
SQL = render_template("/".join(
[self.template_path, 'sql/properties.sql']
), vid=vid, datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext("""Could not find the view."""))
return False, gone(gettext("""Could not find the view."""))
SQL = render_template("/".join(
[self.template_path, 'sql/acl.sql']), vid=vid)
status, dataclres = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
for row in dataclres['rows']:
priv = parse_priv_from_db(row)
@ -428,10 +454,7 @@ class ViewNode(PGChildNodeView, VacuumSettings):
# merging formatted result with main result again
result.update(frmtd_reslt)
return ajax_response(
response=result,
status=200
)
return True, result
@staticmethod
def formatter(result):
@ -556,7 +579,7 @@ class ViewNode(PGChildNodeView, VacuumSettings):
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, vid=None):
def delete(self, gid, sid, did, scid, vid=None, only_sql=False):
"""
This function will drop a view object
"""
@ -604,6 +627,10 @@ class ViewNode(PGChildNodeView, VacuumSettings):
nspname=res_data['rows'][0]['schema'],
name=res_data['rows'][0]['name'], cascade=cascade
)
if only_sql:
return SQL
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -840,7 +867,7 @@ class ViewNode(PGChildNodeView, VacuumSettings):
return columns
def get_rule_sql(self, vid):
def get_rule_sql(self, vid, display_comments=True):
"""
Get all non system rules of view node,
generate their sql and render
@ -869,12 +896,12 @@ class ViewNode(PGChildNodeView, VacuumSettings):
res = parse_rule_definition(res)
SQL = render_template("/".join(
[self.rule_temp_path, 'sql/create.sql']),
data=res, display_comments=True)
data=res, display_comments=display_comments)
SQL_data += '\n'
SQL_data += SQL
return SQL_data
def get_compound_trigger_sql(self, vid):
def get_compound_trigger_sql(self, vid, display_comments=True):
"""
Get all compound trigger nodes associated with view node,
generate their sql and render into sql tab
@ -945,13 +972,13 @@ class ViewNode(PGChildNodeView, VacuumSettings):
[self.ct_trigger_temp_path,
'sql/{0}/#{1}#/create.sql'.format(
self.manager.server_type, self.manager.version)]),
data=res_rows, display_comments=True)
data=res_rows, display_comments=display_comments)
SQL_data += '\n'
SQL_data += SQL
return SQL_data
def get_trigger_sql(self, vid):
def get_trigger_sql(self, vid, display_comments=True):
"""
Get all trigger nodes associated with view node,
generate their sql and render
@ -1038,13 +1065,13 @@ class ViewNode(PGChildNodeView, VacuumSettings):
[self.trigger_temp_path,
'sql/{0}/#{1}#/create.sql'.format(
self.manager.server_type, self.manager.version)]),
data=res_rows, display_comments=True)
data=res_rows, display_comments=display_comments)
SQL_data += '\n'
SQL_data += SQL
return SQL_data
def get_index_sql(self, did, vid):
def get_index_sql(self, did, vid, display_comments=True):
"""
Get all index associated with view node,
generate their sql and render
@ -1084,17 +1111,23 @@ class ViewNode(PGChildNodeView, VacuumSettings):
SQL = render_template("/".join(
[self.index_temp_path,
'sql/#{0}#/create.sql'.format(self.manager.version)]),
data=data, display_comments=True)
data=data, display_comments=display_comments)
SQL_data += '\n'
SQL_data += SQL
return SQL_data
@check_precondition
def sql(self, gid, sid, did, scid, vid):
def sql(self, gid, sid, did, scid, vid, diff_schema=None,
json_resp=True):
"""
This function will generate sql to render into the sql panel
"""
display_comments = True
if not json_resp:
display_comments = False
SQL_data = ''
SQL = render_template("/".join(
[self.template_path, 'sql/properties.sql']),
@ -1111,6 +1144,9 @@ class ViewNode(PGChildNodeView, VacuumSettings):
)
result = res['rows'][0]
if diff_schema:
result['schema'] = diff_schema
# sending result to formatter
frmtd_reslt = self.formatter(result)
@ -1152,18 +1188,20 @@ class ViewNode(PGChildNodeView, VacuumSettings):
[self.template_path, 'sql/create.sql']),
data=result,
conn=self.conn,
display_comments=True
display_comments=display_comments
)
SQL += "\n"
SQL += render_template("/".join(
[self.template_path, 'sql/grant.sql']), data=result)
SQL_data += SQL
SQL_data += self.get_rule_sql(vid)
SQL_data += self.get_trigger_sql(vid)
SQL_data += self.get_compound_trigger_sql(vid)
SQL_data += self.get_index_sql(did, vid)
SQL_data += self.get_rule_sql(vid, display_comments)
SQL_data += self.get_trigger_sql(vid, display_comments)
SQL_data += self.get_compound_trigger_sql(vid, display_comments)
SQL_data += self.get_index_sql(did, vid, display_comments)
if not json_resp:
return SQL_data
return ajax_response(response=SQL_data)
@check_precondition
@ -1357,6 +1395,60 @@ class ViewNode(PGChildNodeView, VacuumSettings):
return ajax_response(response=sql)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, oid=None):
"""
This function will fetch the list of all the views for
the specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
if not oid:
SQL = render_template("/".join([self.template_path,
'sql/nodes.sql']), did=did,
scid=scid, datlastsysoid=self.datlastsysoid)
status, views = self.conn.execute_2darray(SQL)
if not status:
current_app.logger.error(views)
return False
for row in views['rows']:
status, data = self._fetch_properties(scid, row['oid'])
if status:
res[row['name']] = data
else:
status, data = self._fetch_properties(scid, oid)
if not status:
current_app.logger.error(data)
return False
res = data
return res
def get_sql_from_diff(self, gid, sid, did, scid, oid, data=None,
diff_schema=None, drop_sql=False):
sql = ''
if data:
if diff_schema:
data['schema'] = diff_schema
sql, nameOrError = self.getSQL(gid, sid, did, data, oid)
else:
if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did,
scid=scid, vid=oid, only_sql=True)
elif diff_schema:
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, vid=oid,
diff_schema=diff_schema, json_resp=False)
else:
sql = self.sql(gid=gid, sid=sid, did=did, scid=scid, vid=oid,
json_resp=False)
return sql
# Override the operations for materialized view
mview_operations = {
@ -1631,11 +1723,17 @@ class MViewNode(ViewNode, VacuumSettings):
return SQL, data['name'] if 'name' in data else old_data['name']
@check_precondition
def sql(self, gid, sid, did, scid, vid):
def sql(self, gid, sid, did, scid, vid, diff_schema=None,
json_resp=True):
"""
This function will generate sql to render into the sql panel
"""
display_comments = True
if not json_resp:
display_comments = False
SQL_data = ''
SQL = render_template("/".join(
[self.template_path, 'sql/properties.sql']),
@ -1654,6 +1752,9 @@ class MViewNode(ViewNode, VacuumSettings):
result = res['rows'][0]
if diff_schema:
result['schema'] = diff_schema
# sending result to formatter
frmtd_reslt = self.formatter(result)
@ -1732,17 +1833,20 @@ class MViewNode(ViewNode, VacuumSettings):
[self.template_path, 'sql/create.sql']),
data=result,
conn=self.conn,
display_comments=True
display_comments=display_comments
)
SQL += "\n"
SQL += render_template("/".join(
[self.template_path, 'sql/grant.sql']), data=result)
SQL_data += SQL
SQL_data += self.get_rule_sql(vid)
SQL_data += self.get_trigger_sql(vid)
SQL_data += self.get_index_sql(did, vid)
SQL_data += self.get_rule_sql(vid, display_comments)
SQL_data += self.get_trigger_sql(vid, display_comments)
SQL_data += self.get_index_sql(did, vid, display_comments)
SQL_data = SQL_data.strip('\n')
if not json_resp:
return SQL_data
return ajax_response(response=SQL_data)
@check_precondition
@ -1756,9 +1860,9 @@ class MViewNode(ViewNode, VacuumSettings):
values
"""
res = self.get_vacuum_table_settings(self.conn)
res = self.get_vacuum_table_settings(self.conn, sid)
return ajax_response(
response=res['rows'],
response=res,
status=200
)
@ -1772,10 +1876,10 @@ class MViewNode(ViewNode, VacuumSettings):
- setting
values
"""
res = self.get_vacuum_toast_settings(self.conn)
res = self.get_vacuum_toast_settings(self.conn, sid)
return ajax_response(
response=res['rows'],
response=res,
status=200
)
@ -1785,21 +1889,39 @@ class MViewNode(ViewNode, VacuumSettings):
Fetches the properties of an individual view
and renders them in the properties tab
"""
status, res = self._fetch_properties(did, scid, vid)
if not status:
return res
return ajax_response(
response=res,
status=200
)
def _fetch_properties(self, did, scid, vid):
"""
This function is used to fetch the properties of the specified object
:param did:
:param scid:
:param vid:
:return:
"""
SQL = render_template("/".join(
[self.template_path, 'sql/properties.sql']
), did=did, vid=vid, datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext("""Could not find the materialized view."""))
return False, gone(
gettext("""Could not find the materialized view."""))
SQL = render_template("/".join(
[self.template_path, 'sql/acl.sql']), vid=vid)
status, dataclres = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return False, internal_server_error(errormsg=res)
for row in dataclres['rows']:
priv = parse_priv_from_db(row)
@ -1818,10 +1940,7 @@ class MViewNode(ViewNode, VacuumSettings):
result['vacuum_toast'] = self.parse_vacuum_data(
self.conn, result, 'toast')
return ajax_response(
response=result,
status=200
)
return True, result
@check_precondition
def refresh_data(self, gid, sid, did, scid, vid):
@ -1873,6 +1992,34 @@ class MViewNode(ViewNode, VacuumSettings):
current_app.logger.exception(e)
return internal_server_error(errormsg=str(e))
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, oid=None):
"""
This function will fetch the list of all the mviews for
the specified schema id.
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:return:
"""
res = dict()
SQL = render_template("/".join([self.template_path,
'sql/nodes.sql']), did=did,
scid=scid, datlastsysoid=self.datlastsysoid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
status, data = self._fetch_properties(did, scid, row['oid'])
if status:
res[row['name']] = data
return res
SchemaDiffRegistry(view_blueprint.node_type, ViewNode)
ViewNode.register_node_view(view_blueprint)
SchemaDiffRegistry(mview_blueprint.node_type, MViewNode)
MViewNode.register_node_view(mview_blueprint)
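Registering a node with SchemaDiffRegistry, as done above, commits the view class to two hooks the diff tool calls later: fetch_objects_to_compare() to collect {object_name: properties} per schema, and get_sql_from_diff() to produce create, drop, or diff DDL. A rough, self-contained sketch of that contract with a toy registry (names illustrative, not the actual API):

    # Illustrative sketch of the registration contract used above.
    class ToyRegistry:
        _nodes = {}

        def __init__(self, node_type, view_cls):
            ToyRegistry._nodes[node_type] = view_cls

        @classmethod
        def get_node_view(cls, node_type):
            return cls._nodes[node_type]()


    class ToyViewNode:
        def fetch_objects_to_compare(self, scid):
            # Real code returns {object_name: properties_dict} per schema.
            return {'v_orders': {'oid': 1, 'definition': 'SELECT 1'}}

        def get_sql_from_diff(self, oid, drop_sql=False):
            if drop_sql:
                return 'DROP VIEW v_orders;'
            return 'CREATE VIEW v_orders AS SELECT 1;'


    ToyRegistry('view', ToyViewNode)

    if __name__ == '__main__':
        view = ToyRegistry.get_node_view('view')
        print(view.fetch_objects_to_compare(scid=2200))
        print(view.get_sql_from_diff(oid=1, drop_sql=True))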

View File

@ -19,7 +19,7 @@ ALTER TABLE {{ conn|qtIdent(view_schema, view_name) }}
{% endif %}
{% if def and def != o_data.definition.rstrip(';') %}
CREATE OR REPLACE VIEW {{ conn|qtIdent(view_schema, view_name) }}
WITH (security_barrier={{ data.security_barrier|lower if data.security_barrier else o_data.security_barrier|default('false', 'true')|lower }})
WITH (security_barrier={{ data.security_barrier|lower if data.security_barrier is defined else o_data.security_barrier|default('false', 'true')|lower }})
AS
{{ def }};
{% else %}

View File

@ -19,7 +19,7 @@ ALTER TABLE {{ conn|qtIdent(view_schema, view_name) }}
{% endif %}
{% if def and def != o_data.definition.rstrip(';') %}
CREATE OR REPLACE VIEW {{ conn|qtIdent(view_schema, view_name) }}
WITH (security_barrier={{ data.security_barrier|lower if data.security_barrier else o_data.security_barrier|default('false', 'true')|lower }})
WITH (security_barrier={{ data.security_barrier|lower if data.security_barrier is defined else o_data.security_barrier|default('false', 'true')|lower }})
AS
{{ def }};
{% else %}

View File

@ -19,7 +19,7 @@ ALTER TABLE {{ conn|qtIdent(view_schema, view_name) }}
{% endif %}
{% if def and def != o_data.definition.rstrip(';') %}
CREATE OR REPLACE VIEW {{ conn|qtIdent(view_schema, view_name) }}
WITH ({% if (data.check_option or o_data.check_option) %}check_option={{ data.check_option if data.check_option else o_data.check_option }}{{', ' }}{% endif %}security_barrier={{ data.security_barrier|lower if data.security_barrier else o_data.security_barrier|default('false', 'true')|lower }})
WITH ({% if (data.check_option or o_data.check_option) %}check_option={{ data.check_option if data.check_option else o_data.check_option }}{{', ' }}{% endif %}security_barrier={{ data.security_barrier|lower if data.security_barrier is defined else o_data.security_barrier|default('false', 'true')|lower }})
AS
{{ def }};
{% else %}
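The template fix above matters because data.security_barrier may legitimately be False: a plain truthiness test silently falls back to the old value, while `is defined` keeps the explicit False. A small standalone jinja2 check of the difference (toy variable names, not the pgAdmin templates):

    # Standalone demonstration of the truthiness vs `is defined` difference
    # fixed in the view update templates above.
    from jinja2 import Template

    old = Template("{{ data.sb|lower if data.sb else o_data.sb|lower }}")
    new = Template("{{ data.sb|lower if data.sb is defined else o_data.sb|lower }}")

    ctx = {'data': {'sb': False}, 'o_data': {'sb': True}}
    print(old.render(**ctx))   # -> true  (explicit False silently ignored)
    print(new.render(**ctx))   # -> false (explicit False wins)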

View File

@ -11,4 +11,9 @@
background-image: url('{{ url_for('NODE-database.static', filename='img/databasebad.svg') }}') !important;
border-radius: 10px;
background-size: 20px !important;
background-repeat: no-repeat;
vertical-align: middle;
align-content: center;
height: 1.3em;
}

View File

@ -14,9 +14,12 @@ import 'slickgrid/slick.grid';
import 'slickgrid/slick.dataview';
import 'slickgrid/slick.editors';
import 'slickgrid/slick.formatters';
import 'slickgrid/slick.groupitemmetadataprovider';
import 'slickgrid/plugins/slick.autotooltips';
import 'slickgrid/plugins/slick.cellrangedecorator';
import 'slickgrid/plugins/slick.cellrangeselector';
import 'slickgrid/plugins/slick.checkboxselectcolumn';
import 'slickgrid/plugins/slick.rowselectionmodel';
import 'sources/slickgrid/custom_header_buttons';
export default window.Slick;

View File

@ -817,7 +817,7 @@ define([
},
});
Backform.Accordian = Backform.Dialog.extend({
Backform.Accordian = Backform.Dialog.extend({
className: function() {
return 'set-group pg-el-12';
},
@ -2129,7 +2129,9 @@ define([
formatter: Select2Formatter,
template: _.template([
'<label class="<%=Backform.controlLabelClassName%>" for="<%=cId%>"><%=label%></label>',
'<% if(label == false) {} else {%>',
' <label class="<%=Backform.controlLabelClassName%>" for="<%=cId%>"><%=label%></label>',
'<% }%>',
'<div class="<%=Backform.controlsClassName%>">',
' <select id="<%=cId%>" class="<%=Backform.controlClassName%> <%=extraClasses.join(\' \')%>"',
' name="<%=name%>" value="<%-value%>" <%=disabled ? "disabled" : ""%>',
@ -2161,7 +2163,6 @@ define([
'</div>',
].join('\n')),
render: function() {
if (this.$sel && this.$sel.select2 &&
this.$sel.select2.hasOwnProperty('destroy')) {
this.$sel.select2('destroy');
@ -2467,7 +2468,7 @@ define([
self.sqlCtrl.setOption('autoCloseBrackets', sqlEditPreferences.insert_pair_brackets);
self.sqlCtrl.setOption('matchBrackets', sqlEditPreferences.brace_matching);
setTimeout(function() {
self.sqlCtrl.refresh();
if (self.sqlCtrl) self.sqlCtrl.refresh();
}, 500);
}
},

View File

@ -256,4 +256,11 @@ $loading-bg : rgba($black,0.6);
$loading-fg : $white;
$loader-icon : url("data:image/svg+xml;charset=UTF-8,%3c?xml version='1.0' encoding='utf-8'?%3e%3csvg version='1.1' id='Layer_1' xmlns='http://www.w3.org/2000/svg' xmlns:xlink='http://www.w3.org/1999/xlink' x='0px' y='0px' viewBox='0 0 38 38' style='enable-background:new 0 0 38 38;' xml:space='preserve'%3e%3cstyle type='text/css'%3e .st0%7bfill:none;stroke:%23ebeef3;stroke-width:2;%7d .st1%7bfill:none;stroke:%23326690;stroke-width:2;%7d %3c/style%3e%3cg%3e%3cg transform='translate(1 1)'%3e%3ccircle class='st0' cx='18' cy='18' r='18'/%3e%3cpath class='st1' d='M36,18c0-9.9-8.1-18-18-18 '%3e%3canimateTransform accumulate='none' additive='replace' attributeName='transform' calcMode='linear' dur='0.7s' fill='remove' from='0 18 18' repeatCount='indefinite' restart='always' to='360 18 18' type='rotate'%3e%3c/animateTransform%3e%3c/path%3e%3c/g%3e%3c/g%3e%3c/svg%3e ") !default;
$loader-icon-small: url("data:image/svg+xml,%3C%3Fxml version='1.0' encoding='utf-8'%3F%3E%3C!-- Generator: Adobe Illustrator 23.1.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) --%3E%3Csvg version='1.1' id='Layer_1' xmlns='http://www.w3.org/2000/svg' xmlns:xlink='http://www.w3.org/1999/xlink' x='0px' y='0px' viewBox='0 0 38 38' style='enable-background:new 0 0 38 38;' xml:space='preserve'%3E%3Cstyle type='text/css'%3E .st0%7Bfill:none;stroke:%23EBEEF3;stroke-width:5;%7D .st1%7Bfill:none;stroke:%23326690;stroke-width:5;%7D%0A%3C/style%3E%3Cg%3E%3Cg transform='translate(1 1)'%3E%3Ccircle class='st0' cx='18' cy='18' r='16'/%3E%3Cpath class='st1' d='M34,18c0-8.8-7.2-16-16-16 '%3E%3CanimateTransform accumulate='none' additive='replace' attributeName='transform' calcMode='linear' dur='0.7s' fill='remove' from='0 18 18' repeatCount='indefinite' restart='always' to='360 18 18' type='rotate'%3E%3C/animateTransform%3E%3C/path%3E%3C/g%3E%3C/g%3E%3C/svg%3E%0A") !default;
/***************/
$schemadiff-diff-row-color: #fff9c4;
$schemadiff-source-row-color: #ffebee;
$schemadiff-target-row-color: #fbe3bf;

View File

@ -11,6 +11,7 @@ import gettext from '../../../../static/js/gettext';
import url_for from '../../../../static/js/url_for';
import {getTreeNodeHierarchyFromIdentifier} from '../../../../static/js/tree/pgadmin_tree_node';
import {getPanelTitle} from './datagrid_panel_title';
import {getRandomInt} from 'sources/utils';
function hasDatabaseInformation(parentData) {
return parentData.database;
@ -66,3 +67,21 @@ export function showQueryTool(datagrid, pgBrowser, alertify, url, aciTreeIdentif
datagrid.launch_grid(transId, gridUrl, true, queryToolTitle, sURL);
}
export function generateScript(parentData, datagrid) {
const queryToolTitle = `${parentData.database}/${parentData.user}@${parentData.server}`;
const transId = getRandomInt(1, 9999999);
let url_endpoint = url_for('datagrid.panel', {
'trans_id': transId,
});
url_endpoint += `?is_query_tool=${true}`
+`&sgid=${parentData.sgid}`
+`&sid=${parentData.sid}`
+`&server_type=${parentData.stype}`
+`&did=${parentData.did}`;
datagrid.launch_grid(transId, url_endpoint, true, queryToolTitle, '');
}

View File

@ -443,6 +443,15 @@ require(['sources/generated/browser_nodes', 'sources/generated/codemirror', 'sou
{{ url_params|safe}},
'{{ layout|safe }}'
);
// If opening from schema diff, set the generated script to the SQL Editor
var schema_ddl_diff = (window.opener !== null) ? window.opener.pgAdmin.ddl_diff : (window.parent !== null) ? window.parent.pgAdmin.ddl_diff : window.top.pgAdmin.ddl_diff;
sqlEditorController.set_value_to_editor(schema_ddl_diff);
if (window.opener !== null) window.opener.pgAdmin.ddl_diff = '';
else if (window.parent !== null) window.parent.pgAdmin.ddl_diff = '';
else if (window.top !== null) window.top.pgAdmin.ddl_diff = '';
});
});
{% endblock %}

View File

@ -0,0 +1,601 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""A blueprint module implementing the schema_diff frame."""
MODULE_NAME = 'schema_diff'
import simplejson as json
import pickle
import random
from flask import Response, session, url_for, request
from flask import render_template, current_app as app
from flask_security import current_user, login_required
from flask_babelex import gettext
from pgadmin.utils import PgAdminModule
from pgadmin.utils.ajax import make_json_response, bad_request, \
make_response as ajax_response, not_implemented
from pgadmin.model import Server
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from config import PG_DEFAULT_DRIVER
from pgadmin.utils.driver import get_driver
class SchemaDiffModule(PgAdminModule):
"""
class SchemaDiffModule(PgAdminModule)
A module class for Schema Diff derived from PgAdminModule.
"""
LABEL = "Schema Diff"
def get_own_menuitems(self):
return {}
def get_own_javascripts(self):
return [{
'name': 'pgadmin.schema_diff',
'path': url_for('schema_diff.index') + "schema_diff",
'when': None
}]
def get_panels(self):
return []
def get_exposed_url_endpoints(self):
"""
Returns:
list: URL endpoints for Schema Diff module
"""
return [
'schema_diff.initialize',
'schema_diff.panel',
'schema_diff.servers',
'schema_diff.databases',
'schema_diff.schemas',
'schema_diff.compare',
'schema_diff.poll',
'schema_diff.ddl_compare',
'schema_diff.connect_server',
'schema_diff.connect_database',
'schema_diff.get_server',
'schema_diff.generate_script'
]
def register_preferences(self):
self.preference.register(
'display', 'schema_diff_new_browser_tab',
gettext("Open in new browser tab"), 'boolean', False,
category_label=gettext('Display'),
help_str=gettext('If set to True, the Schema Diff '
'will be opened in a new browser tab.')
)
blueprint = SchemaDiffModule(MODULE_NAME, __name__, static_url_path='/static')
@blueprint.route("/")
@login_required
def index():
return bad_request(
errormsg=gettext('This URL cannot be requested directly.')
)
@blueprint.route(
'/panel/<int:trans_id>/<path:editor_title>',
methods=["GET"],
endpoint='panel'
)
def panel(trans_id, editor_title):
"""
This method calls index.html to render the schema diff.
Args:
editor_title: Title of the editor
"""
# If title has slash(es) in it then replace it
if request.args and request.args['fslashes'] != '':
try:
fslashesList = request.args['fslashes'].split(',')
for idx in fslashesList:
idx = int(idx)
editor_title = editor_title[:idx] + '/' + editor_title[idx:]
except IndexError as e:
app.logger.exception(e)
return render_template(
"schema_diff/index.html",
_=gettext,
trans_id=trans_id,
editor_title=editor_title
)
@blueprint.route("/schema_diff.js")
@login_required
def script():
"""render the required javascript"""
return Response(
response=render_template("schema_diff/js/schema_diff.js", _=gettext),
status=200,
mimetype="application/javascript"
)
def check_transaction_status(trans_id):
"""
This function is used to check the transaction id
is available in the session object.
Args:
trans_id:
"""
if 'schemaDiff' not in session:
return False, gettext(
'Transaction ID not found in the session.'
), None, None
schema_diff_data = session['schemaDiff']
# Return from the function if transaction id not found
if str(trans_id) not in schema_diff_data:
return False, gettext(
'Transaction ID not found in the session.'
), None, None
# Fetch the object for the specified transaction id.
# Use pickle.loads function to get the model object
session_obj = schema_diff_data[str(trans_id)]
diff_model_obj = pickle.loads(session_obj['diff_model_obj'])
return True, None, diff_model_obj, session_obj
def update_session_diff_transaction(trans_id, session_obj, diff_model_obj):
"""
This function is used to update the diff model into the session.
:param trans_id:
:param session_obj:
:param diff_model_obj:
:return:
"""
session_obj['diff_model_obj'] = pickle.dumps(diff_model_obj, -1)
if 'schemaDiff' in session:
schema_diff_data = session['schemaDiff']
schema_diff_data[str(trans_id)] = session_obj
session['schemaDiff'] = schema_diff_data
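The diff model is kept across requests by pickling it into the Flask session, which is what the two helpers above implement. A tiny self-contained round trip of the same idea, with a plain dict standing in for the session and a toy model class:

    # Self-contained sketch of the pickle round trip used by
    # check_transaction_status()/update_session_diff_transaction().
    import pickle


    class ToyDiffModel:
        def __init__(self):
            self.message, self.percent = 'Comparing objects...', 0


    session = {}
    trans_id = '4242'

    # initialize(): store a fresh model under the transaction id
    session.setdefault('schemaDiff', {})[trans_id] = {
        'diff_model_obj': pickle.dumps(ToyDiffModel(), -1)
    }

    # later requests: load, mutate, store back
    model = pickle.loads(session['schemaDiff'][trans_id]['diff_model_obj'])
    model.message, model.percent = 'Comparing Views...', 40
    session['schemaDiff'][trans_id]['diff_model_obj'] = pickle.dumps(model, -1)

    print(pickle.loads(session['schemaDiff'][trans_id]['diff_model_obj']).percent)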
@blueprint.route(
'/initialize',
methods=["GET"],
endpoint="initialize"
)
@login_required
def initialize():
"""
This function will initialize the schema diff and return a unique
transaction id for the comparison session.
"""
trans_id = None
try:
# Create a unique id for the transaction
trans_id = str(random.randint(1, 9999999))
if 'schemaDiff' not in session:
schema_diff_data = dict()
else:
schema_diff_data = session['schemaDiff']
# Use pickle to store the Schema Diff Model which will be used
# later by the diff module.
schema_diff_data[trans_id] = {
'diff_model_obj': pickle.dumps(SchemaDiffModel(), -1)
}
# Store the schema diff dictionary into the session variable
session['schemaDiff'] = schema_diff_data
except Exception as e:
app.logger.exception(e)
return make_json_response(
data={'schemaDiffTransId': trans_id})
@blueprint.route(
'/servers',
methods=["GET"],
endpoint="servers"
)
@login_required
def servers():
"""
This function will return the list of servers for the current user.
"""
res = []
try:
"""Return a JSON document listing the server groups for the user"""
driver = get_driver(PG_DEFAULT_DRIVER)
from pgadmin.browser.server_groups.servers import\
server_icon_and_background
for server in Server.query.filter_by(user_id=current_user.id):
manager = driver.connection_manager(server.id)
conn = manager.connection()
connected = conn.connected()
res.append({
"value": server.id,
"label": server.name,
"image": server_icon_and_background(connected, manager,
server),
"_id": server.id,
"connected": connected,
})
except Exception as e:
app.logger.exception(e)
return make_json_response(data=res)
@blueprint.route(
'/get_server/<int:sid>/<int:did>',
methods=["GET"],
endpoint="get_server"
)
@login_required
def get_server(sid, did):
"""
This function will return the server details for the specified
server id.
"""
try:
"""Return a JSON document listing the server groups for the user"""
driver = get_driver(PG_DEFAULT_DRIVER)
server = Server.query.filter_by(id=sid).first()
manager = driver.connection_manager(sid)
conn = manager.connection(did=did)
connected = conn.connected()
res = {
"sid": sid,
"name": server.name,
"user": server.username,
"gid": server.servergroup_id,
"type": manager.server_type,
"connected": connected,
"database": conn.db
}
except Exception as e:
app.logger.exception(e)
return make_json_response(data=res)
@blueprint.route(
'/server/connect/<int:sid>',
methods=["POST"],
endpoint="connect_server"
)
@login_required
def connect_server(sid):
server = Server.query.filter_by(id=sid).first()
view = SchemaDiffRegistry.get_node_view('server')
return view.connect(server.servergroup_id, sid)
@blueprint.route(
'/database/connect/<int:sid>/<int:did>',
methods=["POST"],
endpoint="connect_database"
)
@login_required
def connect_database(sid, did):
server = Server.query.filter_by(id=sid).first()
view = SchemaDiffRegistry.get_node_view('database')
return view.connect(server.servergroup_id, sid, did)
@blueprint.route(
'/databases/<int:sid>',
methods=["GET"],
endpoint="databases"
)
@login_required
def databases(sid):
"""
This function will return the list of databases for the specified
server id.
"""
res = []
try:
view = SchemaDiffRegistry.get_node_view('database')
server = Server.query.filter_by(id=sid).first()
response = view.nodes(gid=server.servergroup_id, sid=sid)
databases = json.loads(response.data)['data']
for db in databases:
res.append({
"value": db['_id'],
"label": db['label'],
"_id": db['_id'],
"connected": db['connected'],
"allowConn": db['allowConn'],
"image": db['icon'],
"canDisconn": db['canDisconn']
})
except Exception as e:
app.logger.exception(e)
return make_json_response(data=res)
@blueprint.route(
'/schemas/<int:sid>/<int:did>',
methods=["GET"],
endpoint="schemas"
)
@login_required
def schemas(sid, did):
"""
This function will return the list of schemas for the specified
server id and database id.
"""
res = []
try:
view = SchemaDiffRegistry.get_node_view('schema')
server = Server.query.filter_by(id=sid).first()
response = view.nodes(gid=server.servergroup_id, sid=sid, did=did)
schemas = json.loads(response.data)['data']
for sch in schemas:
res.append({
"value": sch['_id'],
"label": sch['label'],
"_id": sch['_id'],
"image": sch['icon'],
})
except Exception as e:
app.logger.exception(e)
return make_json_response(data=res)
@blueprint.route(
'/compare/<int:trans_id>/<int:source_sid>/<int:source_did>/'
'<int:source_scid>/<int:target_sid>/<int:target_did>/<int:target_scid>',
methods=["GET"],
endpoint="compare"
)
@login_required
def compare(trans_id, source_sid, source_did, source_scid,
target_sid, target_did, target_scid):
"""
This function will compare the two schemas.
"""
# Check the transaction and connection status
status, error_msg, diff_model_obj, session_obj = \
check_transaction_status(trans_id)
if error_msg == gettext('Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, status=404)
if not check_version_compatibility(source_sid, target_sid):
return not_implemented(errormsg=gettext("Version mismatch."))
comparison_result = []
diff_model_obj.set_comparison_info("Comparing objects...", 0)
update_session_diff_transaction(trans_id, session_obj,
diff_model_obj)
try:
all_registered_nodes = SchemaDiffRegistry.get_registered_nodes()
node_percent = round(100 / len(all_registered_nodes))
total_percent = 0
for node_name, node_view in all_registered_nodes.items():
view = SchemaDiffRegistry.get_node_view(node_name)
if hasattr(view, 'compare'):
msg = "Comparing " + view.blueprint.COLLECTION_LABEL + " ..."
diff_model_obj.set_comparison_info(msg, total_percent)
# Update the message and total percentage in session object
update_session_diff_transaction(trans_id, session_obj,
diff_model_obj)
res = view.compare(source_sid=source_sid,
source_did=source_did,
source_scid=source_scid,
target_sid=target_sid,
target_did=target_did,
target_scid=target_scid)
if res is not None:
comparison_result = comparison_result + res
total_percent = total_percent + node_percent
msg = "Successfully compare the specified schemas."
total_percent = 100
diff_model_obj.set_comparison_info(msg, total_percent)
# Update the message and total percentage done in session object
update_session_diff_transaction(trans_id, session_obj, diff_model_obj)
except Exception as e:
app.logger.exception(e)
return make_json_response(data=comparison_result)
@blueprint.route(
'/poll/<int:trans_id>', methods=["GET"], endpoint="poll"
)
@login_required
def poll(trans_id):
"""
This function is used to check whether the schema comparison is completed or not.
:param trans_id:
:return:
"""
# Check the transaction and connection status
status, error_msg, diff_model_obj, session_obj = \
check_transaction_status(trans_id)
if error_msg == gettext('Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, status=404)
msg, diff_percentage = diff_model_obj.get_comparison_info()
if diff_percentage == 100:
diff_model_obj.set_comparison_info("Comparing objects...", 0)
update_session_diff_transaction(trans_id, session_obj,
diff_model_obj)
return make_json_response(data={'compare_msg': msg,
'diff_percentage': diff_percentage})
@blueprint.route(
'/generate_script/<int:trans_id>/',
methods=["POST"],
endpoint="generate_script"
)
def generate_script(trans_id):
"""This function will generate the scripts for the selected objects."""
data = request.form if request.form else json.loads(
request.data, encoding='utf-8'
)
status, error_msg, diff_model_obj, session_obj = \
check_transaction_status(trans_id)
if error_msg == gettext('Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, status=404)
source_sid = int(data['source_sid'])
source_did = int(data['source_did'])
source_scid = int(data['source_scid'])
target_sid = int(data['target_sid'])
target_did = int(data['target_did'])
target_scid = int(data['target_scid'])
diff_ddl = ''
for d in data['sel_rows']:
node_type = d['node_type']
source_oid = int(d['source_oid'])
target_oid = int(d['target_oid'])
comp_status = d['comp_status']
view = SchemaDiffRegistry.get_node_view(node_type)
if view and hasattr(view, 'ddl_compare') and \
comp_status != SchemaDiffModel.COMPARISON_STATUS['identical']:
sql = view.ddl_compare(source_sid=source_sid,
source_did=source_did,
source_scid=source_scid,
target_sid=target_sid,
target_did=target_did,
target_scid=target_scid,
source_oid=source_oid,
target_oid=target_oid,
comp_status=comp_status,
generate_script=True)
diff_ddl += sql['diff_ddl']
return ajax_response(
status=200,
response={'diff_ddl': diff_ddl}
)
@blueprint.route(
'/ddl_compare/<int:trans_id>/<int:source_sid>/<int:source_did>/'
'<int:source_scid>/<int:target_sid>/<int:target_did>/<int:target_scid>/'
'<int:source_oid>/<int:target_oid>/<node_type>/<comp_status>/',
methods=["GET"],
endpoint="ddl_compare"
)
@login_required
def ddl_compare(trans_id, source_sid, source_did, source_scid,
target_sid, target_did, target_scid, source_oid,
target_oid, node_type, comp_status):
"""
This function is used to compare the specified object and return the
DDL comparison.
"""
# Check the transaction and connection status
status, error_msg, diff_model_obj, session_obj = \
check_transaction_status(trans_id)
if error_msg == gettext('Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, status=404)
source_ddl = ''
target_ddl = ''
diff_ddl = ''
view = SchemaDiffRegistry.get_node_view(node_type)
if view and hasattr(view, 'ddl_compare'):
sql = view.ddl_compare(source_sid=source_sid, source_did=source_did,
source_scid=source_scid, target_sid=target_sid,
target_did=target_did, target_scid=target_scid,
source_oid=source_oid, target_oid=target_oid,
comp_status=comp_status)
return ajax_response(
status=200,
response={'source_ddl': sql['source_ddl'],
'target_ddl': sql['target_ddl'],
'diff_ddl': sql['diff_ddl']}
)
msg = gettext('Selected object is not supported for DDL comparison.')
return ajax_response(
status=200,
response={'source_ddl': msg,
'target_ddl': msg,
'diff_ddl': msg
}
)
def check_version_compatibility(sid, tid):
"""Check the version compatibility of source and target servers."""
driver = get_driver(PG_DEFAULT_DRIVER)
src_server = Server.query.filter_by(id=sid).first()
src_manager = driver.connection_manager(src_server.id)
tar_server = Server.query.filter_by(id=tid).first()
tar_manager = driver.connection_manager(tar_server.id)
def get_round_val(x):
if x < 10000:
return x if x % 100 == 0 else x + 100 - x % 100
else:
return x if x % 10000 == 0 else x + 10000 - x % 10000
if get_round_val(src_manager.version) == \
get_round_val(tar_manager.version):
return True
return False
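check_version_compatibility rounds both server versions up into the same release bucket before comparing them. A worked example of get_round_val with PostgreSQL version numbers (standalone copy of the function above):

    # Worked example of the rounding above: pre-10 servers round up to the
    # next hundred, 10+ servers to the next ten thousand, so two servers on
    # the same major release (with non-zero minor versions) land in the same
    # bucket and are treated as compatible.
    def get_round_val(x):
        if x < 10000:
            return x if x % 100 == 0 else x + 100 - x % 100
        return x if x % 10000 == 0 else x + 10000 - x % 10000


    print(get_round_val(90624))    # 9.6.24 -> 90700
    print(get_round_val(90611))    # 9.6.11 -> 90700   (same bucket)
    print(get_round_val(110005))   # 11.5   -> 120000
    print(get_round_val(110002))   # 11.2   -> 120000  (same bucket)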

View File

@ -0,0 +1,212 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Schema diff object comparison."""
import copy
from flask import render_template
from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils.driver import get_driver
from pgadmin.utils.ajax import internal_server_error
from config import PG_DEFAULT_DRIVER
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from abc import abstractmethod
class SchemaDiffObjectCompare():
keys_to_ignore = ['oid', 'schema']
@staticmethod
def get_schema(sid, did, scid):
"""
This function will return the schema name.
"""
driver = get_driver(PG_DEFAULT_DRIVER)
manager = driver.connection_manager(sid)
conn = manager.connection(did=did)
ver = manager.version
server_type = manager.server_type
# Fetch schema name
status, schema_name = conn.execute_scalar(
render_template(
"/".join(['schemas',
'{0}/#{1}#'.format(server_type, ver),
'sql/get_name.sql']),
conn=conn, scid=scid
)
)
return status, schema_name
def compare(self, **kwargs):
"""
This function is used to compare all the objects
from two different schemas.
:param kwargs:
:return:
"""
source_params = {'sid': kwargs.get('source_sid'),
'did': kwargs.get('source_did'),
'scid': kwargs.get('source_scid')
}
target_params = {'sid': kwargs.get('target_sid'),
'did': kwargs.get('target_did'),
'scid': kwargs.get('target_scid')
}
if 'source_tid' in kwargs:
source_params['tid'] = kwargs['source_tid']
if 'target_tid' in kwargs:
target_params['tid'] = kwargs['target_tid']
source = self.fetch_objects_to_compare(**source_params)
target = self.fetch_objects_to_compare(**target_params)
# If both the dicts have no items then return None.
if not (source or target) or (
len(source) <= 0 and len(target) <= 0):
return None
return compare_dictionaries(source, target,
self.node_type,
self.blueprint.COLLECTION_LABEL,
self.keys_to_ignore)
def ddl_compare(self, **kwargs):
"""
This function will compare object properties and
return the difference of SQL
"""
source = ''
target = ''
diff = ''
comp_status = kwargs.get('comp_status')
only_diff = False
generate_script = False
source_params = {'gid': 1,
'sid': kwargs.get('source_sid'),
'did': kwargs.get('source_did'),
'scid': kwargs.get('source_scid'),
'oid': kwargs.get('source_oid')
}
target_params = {'gid': 1,
'sid': kwargs.get('target_sid'),
'did': kwargs.get('target_did'),
'scid': kwargs.get('target_scid'),
'oid': kwargs.get('target_oid')
}
if 'source_tid' in kwargs:
source_params['tid'] = kwargs['source_tid']
only_diff = True
if 'target_tid' in kwargs:
target_params['tid'] = kwargs['target_tid']
only_diff = True
if 'generate_script' in kwargs and kwargs['generate_script']:
generate_script = True
source_params_adv = copy.deepcopy(source_params)
target_params_adv = copy.deepcopy(target_params)
del source_params_adv['gid']
del target_params_adv['gid']
status, target_schema = self.get_schema(kwargs.get('target_sid'),
kwargs.get('target_did'),
kwargs.get('target_scid')
)
if not status:
return internal_server_error(errormsg=target_schema)
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
if not generate_script:
source = self.get_sql_from_diff(**source_params)
source_params.update({
'diff_schema': target_schema
})
diff = self.get_sql_from_diff(**source_params)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
if not generate_script:
target = self.get_sql_from_diff(**target_params)
target_params.update(
{'drop_sql': True})
diff = self.get_sql_from_diff(**target_params)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['different']:
source = self.fetch_objects_to_compare(**source_params_adv)
target = self.fetch_objects_to_compare(**target_params_adv)
if not (source or target):
return None
diff_dict = directory_diff(source,
target,
ignore_keys=self.keys_to_ignore,
difference={}
)
diff_dict.update(self.parce_acl(source, target))
if not generate_script:
source = self.get_sql_from_diff(**source_params)
target = self.get_sql_from_diff(**target_params)
target_params.update(
{'data': diff_dict})
diff = self.get_sql_from_diff(**target_params)
else:
source = self.get_sql_from_diff(**source_params)
target = self.get_sql_from_diff(**target_params)
if only_diff:
return diff
return {'source_ddl': source,
'target_ddl': target,
'diff_ddl': diff
}
@staticmethod
def parce_acl(source, target):
key = 'acl'
if 'datacl' in source:
key = 'datacl'
elif 'relacl' in source:
key = 'relacl'
tmp_source = source[key] if\
key in source and source[key] is not None else []
tmp_target = copy.deepcopy(target[key]) if\
key in target and target[key] is not None else []
diff = {'added': [], 'deleted': []}
for acl in tmp_source:
if acl in tmp_target:
tmp_target.remove(acl)
elif acl not in tmp_target:
diff['added'].append(acl)
diff['deleted'] = tmp_target
return {key: diff}
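parce_acl reduces the two privilege lists to an added/deleted pair: every source grant missing from the target is reported as added, and whatever remains of the target afterwards is reported as deleted. A quick self-contained run of the same logic on toy ACL entries (illustrative data, not real pgAdmin ACL rows):

    # Self-contained illustration of the parce_acl() logic above.
    import copy


    def acl_diff(source_acl, target_acl):
        remaining = copy.deepcopy(target_acl)
        diff = {'added': [], 'deleted': []}
        for acl in source_acl:
            if acl in remaining:
                remaining.remove(acl)
            else:
                diff['added'].append(acl)
        diff['deleted'] = remaining
        return diff


    src = [{'grantee': 'alice', 'privileges': ['SELECT', 'INSERT']}]
    tgt = [{'grantee': 'bob', 'privileges': ['SELECT']}]
    print(acl_diff(src, tgt))
    # alice's grant is 'added', bob's leftover grant is 'deleted'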

View File

@ -0,0 +1,279 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Directory comparison"""
import copy
from pgadmin.tools.schema_diff.model import SchemaDiffModel
count = 1
def compare_dictionaries(source_dict, target_dict, node, node_label,
ignore_keys=None):
"""
This function will compare the two dictionaries.
:param source_dict: First Dictionary
:param target_dict: Second Dictionary
:param node: node type
:param node_label: node label
:param ignore_keys: List of keys that will be ignored while comparing
:return:
"""
dict1 = copy.deepcopy(source_dict)
dict2 = copy.deepcopy(target_dict)
# Find the duplicate keys in both the dictionaries
dict1_keys = set(dict1.keys())
dict2_keys = set(dict2.keys())
intersect_keys = dict1_keys.intersection(dict2_keys)
# Keys that are available in source and missing in target.
source_only = []
added = dict1_keys - dict2_keys
global count
for item in added:
source_only.append({
'id': count,
'type': node,
'label': node_label,
'title': item,
'oid': source_dict[item]['oid'],
'status': SchemaDiffModel.COMPARISON_STATUS['source_only']
})
count += 1
target_only = []
# Keys that are available in target and missing in source.
removed = dict2_keys - dict1_keys
for item in removed:
target_only.append({
'id': count,
'type': node,
'label': node_label,
'title': item,
'oid': target_dict[item]['oid'],
'status': SchemaDiffModel.COMPARISON_STATUS['target_only']
})
count += 1
# Compare the values of the common keys.
identical = []
different = []
for key in intersect_keys:
# Remove the keys that should be ignored, if present.
for ig_key in ignore_keys:
if ig_key in dict1[key]:
dict1[key].pop(ig_key)
if ig_key in dict2[key]:
dict2[key].pop(ig_key)
# Recursively compare the two dictionaries.
if are_dictionaries_identical(dict1[key], dict2[key], ignore_keys):
identical.append({
'id': count,
'type': node,
'label': node_label,
'title': key,
'oid': source_dict[key]['oid'],
'source_oid': source_dict[key]['oid'],
'target_oid': target_dict[key]['oid'],
'status': SchemaDiffModel.COMPARISON_STATUS['identical']
})
else:
different.append({
'id': count,
'type': node,
'label': node_label,
'title': key,
'oid': source_dict[key]['oid'],
'source_oid': source_dict[key]['oid'],
'target_oid': target_dict[key]['oid'],
'status': SchemaDiffModel.COMPARISON_STATUS['different']
})
count += 1
return source_only + target_only + different + identical
def are_lists_identical(source_list, target_list, ignore_keys):
"""
This function is used to compare two list.
:param source_list:
:param target_list:
:return:
"""
if source_list is None or target_list is None or \
len(source_list) != len(target_list):
return False
else:
for index in range(len(source_list)):
# If the value is a dictionary then call
# are_dictionaries_identical() on it.
if type(source_list[index]) is dict:
if not are_dictionaries_identical(source_list[index],
target_list[index],
ignore_keys):
return False
else:
if source_list[index] != target_list[index]:
return False
return True
def are_dictionaries_identical(source_dict, target_dict, ignore_keys):
"""
This function is used to recursively compare two dictionaries with
same keys.
:param source_dict:
:param target_dict:
:return:
"""
src_keys = set(source_dict.keys())
tar_keys = set(target_dict.keys())
# Remove the keys that should be ignored, if present.
for ig_key in ignore_keys:
if ig_key in src_keys:
source_dict.pop(ig_key)
if ig_key in target_dict:
target_dict.pop(ig_key)
# Keys that are available in source and missing in target.
src_only = src_keys - tar_keys
# Keys that are available in target and missing in source.
tar_only = tar_keys - src_keys
# If the number of keys differs between source and target then
# return False
if len(src_only) != len(tar_only):
return False
else:
# If the number of keys is the same but a source-only key is not
# present in the target then return False
for key in src_only:
if key not in tar_only:
return False
for key in source_dict.keys():
if type(source_dict[key]) is dict:
if not are_dictionaries_identical(source_dict[key],
target_dict[key], ignore_keys):
return False
elif type(source_dict[key]) is list:
if not are_lists_identical(source_dict[key], target_dict[key],
ignore_keys):
return False
else:
if source_dict[key] != target_dict[key]:
return False
return True
def directory_diff(source_dict, target_dict, ignore_keys=None, difference=None):
"""
This function is used to recursively compare two dictionaries and
return the difference.
The difference is from source to target.
:param source_dict: source dict
:param target_dict: target dict
:param ignore_keys: list of keys to ignore while comparing
:param difference: dict in which the differences are accumulated
"""
# Use fresh defaults on every call to avoid sharing mutable arguments.
ignore_keys = ignore_keys if ignore_keys is not None else []
difference = difference if difference is not None else {}
src_keys = set(source_dict.keys())
tar_keys = set(target_dict.keys())
# Keys that are available in source and missing in target.
src_only = src_keys - tar_keys
# Keys that are available in target and missing in source.
tar_only = tar_keys - src_keys
for key in source_dict.keys():
added = []
deleted = []
updated = []
source = None
# Skip the keys that should be ignored.
if key in ignore_keys:
pass
elif key in tar_only:
# Target only values go in the deleted list
difference[key] = {}
difference[key]['deleted'] = target_dict[key]
elif key in src_only:
# Source only values in the newly added list
if type(source_dict[key]) is list:
difference[key] = {}
difference[key]['added'] = source_dict[key]
elif type(source_dict[key]) is dict:
# The key is missing from the target, so compare against an
# empty dict to collect everything as added.
directory_diff(source_dict[key], {}, ignore_keys, difference)
elif type(source_dict[key]) is list:
tmp_target = None
for index in range(len(source_dict[key])):
source = copy.deepcopy(source_dict[key][index])
if type(source) is list:
# TODO
pass
elif type(source) is dict:
if 'name' in source or 'colname' in source:
if type(target_dict[key]) is list and len(
target_dict[key]) > 0:
tmp = None
tmp_target = copy.deepcopy(target_dict[key])
for item in tmp_target:
if (
'name' in item and
item['name'] == source['name']
) or (
'colname' in item and
item['colname'] == source['colname']
):
tmp = copy.deepcopy(item)
if tmp and source != tmp:
updated.append(copy.deepcopy(source))
tmp_target.remove(tmp)
elif tmp and source == tmp:
tmp_target.remove(tmp)
elif tmp is None:
added.append(source)
else:
added.append(source)
difference[key] = {}
difference[key]['added'] = added
difference[key]['changed'] = updated
elif target_dict[key] is None or \
(type(target_dict[key]) is list and
len(target_dict[key]) < index and
source != target_dict[key][index]):
difference[key] = source
elif type(target_dict[key]) is list and\
len(target_dict[key]) > index:
difference[key] = source
if type(source) is dict and tmp_target and key in tmp_target and \
tmp_target[key] and len(tmp_target[key]) > 0:
if type(tmp_target[key]) is list and \
type(tmp_target[key][0]) is dict:
deleted = deleted + tmp_target[key]
else:
deleted.append({key: tmp_target[key]})
difference[key]['deleted'] = deleted
elif tmp_target and type(tmp_target) is list:
difference[key]['deleted'] = tmp_target
else:
if source_dict[key] != target_dict[key]:
difference[key] = source_dict[key]
return difference
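# Illustrative sketch, not part of this change: exercising the helpers above
# with two hypothetical property dictionaries keyed by object name. The
# sample keys ('owner', 'columns', ...) are examples, not pgAdmin's exact
# property layout.
if __name__ == '__main__':
    source = {
        'emp': {'oid': 1001, 'owner': 'alice',
                'columns': [{'name': 'id', 'cltype': 'integer'},
                            {'name': 'ename', 'cltype': 'text'}]},
        'dept': {'oid': 1002, 'owner': 'alice'},
    }
    target = {
        'emp': {'oid': 2001, 'owner': 'alice',
                'columns': [{'name': 'id', 'cltype': 'bigint'}]},
        'log': {'oid': 2003, 'owner': 'bob'},
    }

    # Row-level comparison used to build the grid; OIDs always differ
    # between servers, so they are ignored while comparing.
    for row in compare_dictionaries(source, target, 'table', 'Tables',
                                    ignore_keys=['oid']):
        print(row['title'], '->', row['status'])
    # dept -> Source Only, log -> Target Only, emp -> Different

    # Key-level diff of a single pair of objects (from source to target).
    print(directory_diff(source['emp'], target['emp'],
                         ignore_keys=['oid'], difference={}))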

View File

@ -0,0 +1,76 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
class SchemaDiffModel(object):
"""
SchemaDiffModel
"""
COMPARISON_STATUS = {
'source_only': 'Source Only',
'target_only': 'Target Only',
'different': 'Different',
'identical': 'Identical'
}
def __init__(self, **kwargs):
"""
This method is used to initialize the class and
create a proper object name which will be used
to fetch the data using namespace name and object name.
Args:
**kwargs : N number of parameters
"""
self._comparison_result = dict()
self._comparison_msg = 'Comparison started...'
self._comparison_percentage = 0
def clear_data(self):
"""
This function clears the model data.
"""
self._comparison_result.clear()
def set_result(self, node_name, compare_result):
"""
This method sets the result of the comparison for the given node.
"""
self._comparison_result[node_name] = compare_result
def get_result(self, node_name=None):
"""
This function will return the result for the specified node,
or the complete result if no node is specified.
:param node_name: Name of the node e.g. Database, Schema, etc.
:return:
"""
if node_name is not None:
return self._comparison_result[node_name]
return self._comparison_result
def get_comparison_info(self):
"""
This function is used to get the comparison information.
:return:
"""
return self._comparison_msg, self._comparison_percentage
def set_comparison_info(self, msg, percentage):
"""
This function is used to set the comparison information.
:param msg:
:param percentage:
:return:
"""
self._comparison_msg = msg
self._comparison_percentage = percentage
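# Illustrative sketch, not part of this change: how the model is meant to be
# driven while a comparison is running. The node name and result rows are
# hypothetical.
if __name__ == '__main__':
    model = SchemaDiffModel()
    model.set_comparison_info('Comparing tables...', 40)
    model.set_result('table', [{'title': 'emp', 'status': 'Different'}])

    msg, percentage = model.get_comparison_info()
    print(msg, percentage)            # Comparing tables... 40
    print(model.get_result('table'))  # [{'title': 'emp', 'status': 'Different'}]
    model.clear_data()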

View File

@ -0,0 +1,61 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
class SchemaDiffRegistry(object):
"""
SchemaDiffRegistry
It is a registry of the different types of nodes supported by the schema diff.
"""
_registered_nodes = dict()
def __init__(self, node_name, node_view, parent_node='schema'):
if node_name not in SchemaDiffRegistry._registered_nodes:
SchemaDiffRegistry._registered_nodes[node_name] = {
'view': node_view,
'parent': parent_node
}
@classmethod
def get_registered_nodes(cls, node_name=None, parent_node='schema'):
"""
This function will return the node's view object if a node name
is specified, or the complete list of registered nodes otherwise.
:param node_name: Name of the node e.g. Database, Schema, etc.
:param parent_node: Name of the parent node (default: 'schema')
:return:
"""
if node_name is not None:
if node_name in cls._registered_nodes:
return cls._registered_nodes[node_name]['view']
else:
return None
registered_nodes = {}
for key, value in cls._registered_nodes.items():
if value['parent'] == parent_node:
registered_nodes[key] = value['view']
return registered_nodes
@classmethod
def get_node_view(cls, node_name):
"""
This function will return the view object for the "nodes"
command as per the specified node name.
:param node_name: Name of the node e.g. Database, Schema, etc.
:return:
"""
cmd = {"cmd": "nodes, compare, ddl_compare"}
module = SchemaDiffRegistry.get_registered_nodes(node_name)
if not module:
return None
return module(**cmd)
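# Illustrative sketch, not part of this change: how a node module registers
# its view with the registry and how the schema diff tool later instantiates
# it. MyObjectView is a hypothetical stand-in for a real node view class.
if __name__ == '__main__':
    class MyObjectView(object):
        def __init__(self, **kwargs):
            self.cmd = kwargs.get('cmd')

    SchemaDiffRegistry('my_object', MyObjectView)
    print(SchemaDiffRegistry.get_registered_nodes('my_object'))
    view = SchemaDiffRegistry.get_node_view('my_object')
    print(view.cmd)  # nodes, compare, ddl_compare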

View File

@ -0,0 +1,189 @@
.icon-schema-diff {
display: inline-block;
align-content: center;
vertical-align: middle;
height: 18px;
width: 18px;
background-size: 20px !important;
background-repeat: no-repeat;
background-position-x: center;
background-position-y: center;
background-image: url('../img/compare.svg') !important;
}
.icon-schema-diff-white {
display: inline-block;
align-content: center;
vertical-align: middle;
height: 18px;
width: 18px;
background-size: 20px !important;
background-repeat: no-repeat;
background-position-x: center;
background-position-y: center;
background-image: url('../img/compare-white.svg') !important;
}
.icon-script {
display: inline-block;
align-content: center;
vertical-align: middle;
height: 18px;
width: 18px;
background-size: 20px !important;
background-repeat: no-repeat;
background-position-x: center;
background-position-y: center;
background-image: url('../img/script.svg') !important;
}
.really-hidden {
display: none !important;
}
#schema-diff-header {
margin-top: 2px;
}
#schema-diff-header .control-label {
width: 120px !important;
padding: 5px 5px !important;
}
.slick-header-column.ui-state-default {
height: 32px !important;
}
#schema-diff-grid .grid-header label {
display: inline-block;
font-weight: bold;
margin: auto auto auto 6px;
}
.grid-header .ui-icon {
margin: 4px 4px auto 6px;
background-color: transparent;
border-color: transparent;
}
.slick-row .cell-actions {
text-align: left;
}
/* Slick.Editors.Text, Slick.Editors.Date */
#schema-diff-grid .slick-header > input.editor-text {
width: 100%;
height: 100%;
border: 0;
margin: 0;
background: transparent;
outline: 0;
padding: 0;
}
/* Slick.Editors.Checkbox */
#schema-diff-grid .slick-header > input.editor-checkbox {
margin: 0;
height: 100%;
padding: 0;
border: 0;
}
.slick-row.selected .cell-selection {
background-color: transparent; /* show default selected row background */
}
#schema-diff-grid .slick-header .ui-state-default,
#schema-diff-grid .slick-header .ui-widget-content.ui-state-default,
#schema-diff-grid .slick-header .ui-widget-header .ui-state-default {
background: none;
}
#schema-diff-grid .slick-header .slick-header-column {
font-weight: bold;
display: block;
}
.slick-group-toggle.collapsed, .slick-group-toggle.expanded {
background: none !important;
width: 20px;
}
.slick-group-toggle.collapsed::before {
font-family: "FontAwesome";
content: "\f054";
font-size: 0.6rem;
border: none;
}
.slick-group-toggle.expanded::before {
font-family: "FontAwesome";
content: "\f078";
font-size: 0.6rem;
margin-left: 0rem;
}
.slick-group-toggle {
margin-right: 0px !important;
height: 11px !important;
}
#schema-diff-ddl-comp .badge .caret {
display: inline-block;
margin-left: 2px;
margin-right: 4px;
width: 0.7rem;
}
#schema-diff-ddl-comp .badge .caret::before {
font-family: "FontAwesome";
content: "\f078";
font-size: 0.7rem;
margin-left: 0rem;
}
#schema-diff-ddl-comp .badge {
font-size: inherit;
padding: 7px;
}
#schema-diff-ddl-comp .accordian-group {
padding: 0px;
}
#ddl_comp_fetching_data.pg-sp-container {
height: 100%;
bottom: 10px;
.pg-sp-content {
position: absolute;
width: 100%;
}
}
.ddl-copy {
z-index: 10;
position: absolute;
right: 1px;
top: 1px;
}
#schema-diff-grid .pg-panel-message {
font-size: 0.875rem;
}
#schema-diff-ddl-comp .sql_field_layout {
overflow: auto !important;
height: 100%;
}
#schema-diff-ddl-comp .source_ddl, #schema-diff-ddl-comp .target_ddl, #schema-diff-ddl-comp .diff_ddl {
height: 300px;
overflow: hidden;
}
.target-buttons {
flex-wrap: wrap;
max-width: 40% !important;
}

View File

@ -0,0 +1,15 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 23.0.2, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Слой_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 26 26" style="enable-background:new 0 0 26 26;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<path class="st0" d="M21.5,2h-9c-1.1,0-2,0.9-2,2v3h-6c-1.1,0-2,0.9-2,2v13c0,1.1,0.9,2,2,2h8c1.1,0,2-0.9,2-2v-3h7c1.1,0,2-0.9,2-2
V4C23.5,2.9,22.6,2,21.5,2z M12.5,14.8H8.9l1-1c0.1-0.1,0.1-0.2,0.1-0.3s0-0.2-0.1-0.3l-0.3-0.3c-0.1-0.1-0.2-0.1-0.3-0.1
c-0.1,0-0.2,0-0.3,0.1l-2.3,2.3c-0.1,0.1-0.1,0.2-0.1,0.3c0,0.1,0,0.2,0.1,0.3L9,18.1c0.1,0.1,0.2,0.1,0.3,0.1c0.1,0,0.2,0,0.3-0.1
l0.3-0.3c0.1-0.1,0.1-0.2,0.1-0.3c0-0.1,0-0.2-0.1-0.3l-1-1h3.6l0,5.8h-8V9h8L12.5,14.8z M21.5,9.8h-3.6l1-1
c0.1-0.1,0.1-0.2,0.1-0.3s0-0.2-0.1-0.3l-0.3-0.3c-0.1-0.1-0.2-0.1-0.3-0.1c-0.1,0-0.2,0-0.3,0.1l-2.3,2.3c-0.1,0.1-0.1,0.2-0.1,0.3
c0,0.1,0,0.2,0.1,0.3l2.3,2.3c0.1,0.1,0.2,0.1,0.3,0.1c0.1,0,0.2,0,0.3-0.1l0.3-0.3c0.1-0.1,0.1-0.2,0.1-0.3c0-0.1,0-0.2-0.1-0.3
l-1-1h3.6l0,5.8h-7V9c0-1.1-0.9-2-2-2V4h9L21.5,9.8z"/>
</svg>


View File

@ -0,0 +1,15 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 23.0.2, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Слой_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 26 26" style="enable-background:new 0 0 26 26;" xml:space="preserve">
<style type="text/css">
.st0{fill:#222222;}
</style>
<path class="st0" d="M21.5,2h-9c-1.1,0-2,0.9-2,2v3h-6c-1.1,0-2,0.9-2,2v13c0,1.1,0.9,2,2,2h8c1.1,0,2-0.9,2-2v-3h7c1.1,0,2-0.9,2-2
V4C23.5,2.9,22.6,2,21.5,2z M12.5,14.8H8.9l1-1c0.1-0.1,0.1-0.2,0.1-0.3s0-0.2-0.1-0.3l-0.3-0.3c-0.1-0.1-0.2-0.1-0.3-0.1
c-0.1,0-0.2,0-0.3,0.1l-2.3,2.3c-0.1,0.1-0.1,0.2-0.1,0.3c0,0.1,0,0.2,0.1,0.3L9,18.1c0.1,0.1,0.2,0.1,0.3,0.1c0.1,0,0.2,0,0.3-0.1
l0.3-0.3c0.1-0.1,0.1-0.2,0.1-0.3c0-0.1,0-0.2-0.1-0.3l-1-1h3.6l0,5.8h-8V9h8L12.5,14.8z M21.5,9.8h-3.6l1-1
c0.1-0.1,0.1-0.2,0.1-0.3s0-0.2-0.1-0.3l-0.3-0.3c-0.1-0.1-0.2-0.1-0.3-0.1c-0.1,0-0.2,0-0.3,0.1l-2.3,2.3c-0.1,0.1-0.1,0.2-0.1,0.3
c0,0.1,0,0.2,0.1,0.3l2.3,2.3c0.1,0.1,0.2,0.1,0.3,0.1c0.1,0,0.2,0,0.3-0.1l0.3-0.3c0.1-0.1,0.1-0.2,0.1-0.3c0-0.1,0-0.2-0.1-0.3
l-1-1h3.6l0,5.8h-7V9c0-1.1-0.9-2-2-2V4h9L21.5,9.8z"/>
</svg>


View File

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 23.0.2, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
.st0{fill:#222222;}
</style>
<g>
<path class="st0" d="M40,35V11c0-3.9-3.1-7-7-7H7c-3.9,0-7,3.1-7,7c0,3.1,2.1,5.8,5,6.7V39c0,3.9,3.1,7,7,7h31c3.9,0,7-3.1,7-7v-4
H40z M43,42c-0.8,0-1.8-1.4-2.5-3h5C44.9,40.5,43.8,42,43,42z M7,8h19.6C26.2,8.9,26,9.9,26,11c0,1,0.2,2,0.7,3H7c-1.7,0-3-1.3-3-3
S5.3,8,7,8z M36.7,42H12c-1.7,0-3-1.3-3-3V18h25v-4h-1c-1.7,0-3-1.3-3-3s1.3-3,3-3s3,1.3,3,3v28C36,40,36.2,41,36.7,42z"/>
<rect x="13" y="21" class="st0" width="4" height="4"/>
<rect x="19" y="21" class="st0" width="13" height="4"/>
<rect x="13" y="28" class="st0" width="4" height="4"/>
<rect x="19" y="28" class="st0" width="13" height="4"/>
<rect x="13" y="35" class="st0" width="4" height="4"/>
<rect x="19" y="35" class="st0" width="13" height="4"/>
</g>
</svg>


View File

@ -0,0 +1,500 @@
/////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2020, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////
import $ from 'jquery';
import Backbone from 'backbone';
import Backform from 'pgadmin.backform';
import gettext from 'sources/gettext';
import clipboard from 'sources/selection/clipboard';
var formatNode = function (opt) {
if (!opt.id) {
return opt.text;
}
var optimage = $(opt.element).data('image');
if (!optimage) {
return opt.text;
} else {
return $('<span></span>').append(
$('<span></span>', {
class: 'wcTabIcon ' + optimage,
})
).append($('<span></span>').text(opt.text));
}
};
let SchemaDiffSqlControl =
Backform.SqlFieldControl.extend({
defaults: {
label: '',
extraClasses: [], // Additional classes to add to the control
helpMessage: null,
maxlength: 4096,
rows: undefined,
copyRequired: false,
},
template: _.template([
'<% if (copyRequired) { %><button class="btn btn-secondary ddl-copy d-none">' + gettext('Copy') + '</button> <% } %>',
'<div class="pgadmin-controls pg-el-9 pg-el-12 sql_field_layout <%=extraClasses.join(\' \')%>">',
' <textarea ',
' class="<%=Backform.controlClassName%> " name="<%=name%>"',
' maxlength="<%=maxlength%>" placeholder="<%-placeholder%>" <%=disabled ? "disabled" : ""%>',
' rows=<%=rows%>',
' <%=required ? "required" : ""%>><%-value%></textarea>',
' <% if (helpMessage && helpMessage.length) { %>',
' <span class="<%=Backform.helpMessageClassName%>"><%=helpMessage%></span>',
' <% } %>',
'</div>',
].join('\n')),
initialize: function() {
Backform.TextareaControl.prototype.initialize.apply(this, arguments);
this.sqlCtrl = null;
_.bindAll(this, 'onFocus', 'onBlur', 'refreshTextArea', 'copyData',);
},
render: function() {
let obj = Backform.SqlFieldControl.prototype.render.apply(this, arguments);
if(this.$el.find('.ddl-copy')) this.$el.find('.ddl-copy').on('click', this.copyData);
return obj;
},
copyData() {
event.stopPropagation();
clipboard.copyTextToClipboard(this.model.get('diff_ddl'));
return false;
},
onFocus: function() {
let $ctrl = this.$el.find('.pgadmin-controls').first(),
$copy = this.$el.find('.ddl-copy');
if (!$ctrl.hasClass('focused')) $ctrl.addClass('focused');
if ($copy.hasClass('d-none')) $copy.removeClass('d-none');
},
onBlur: function() {
let $copy = this.$el.find('.ddl-copy');
if (!$(event.relatedTarget).hasClass('ddl-copy')) {
if (!$copy.hasClass('d-none')) $copy.addClass('d-none');
this.$el.find('.pgadmin-controls').first().removeClass('focused');
}
},
});
let SchemaDiffSelect2Control =
Backform.Select2Control.extend({
defaults: _.extend(Backform.Select2Control.prototype.defaults, {
url: undefined,
transform: undefined,
url_with_id: false,
select2: {
allowClear: true,
placeholder: gettext('Select an item...'),
width: 'style',
templateResult: formatNode,
templateSelection: formatNode,
},
controlsClassName: 'pgadmin-controls pg-el-sm-11 pg-el-12',
}),
className: function() {
return 'pgadmin-controls pg-el-sm-4';
},
events: {
'focus select': 'clearInvalid',
'keydown :input': 'processTab',
'select2:select': 'onSelect',
'select2:selecting': 'beforeSelect',
'select2:clear': 'onChange',
},
template: _.template([
'<% if(label == false) {} else {%>',
' <label class="<%=Backform.controlLabelClassName%>"><%=label%></label>',
'<% }%>',
'<div class="<%=controlsClassName%>">',
' <select class="<%=Backform.controlClassName%> <%=extraClasses.join(\' \')%>"',
' name="<%=name%>" value="<%-value%>" <%=disabled ? "disabled" : ""%>',
' <%=required ? "required" : ""%><%= select2.multiple ? " multiple>" : ">" %>',
' <%=select2.first_empty ? " <option></option>" : ""%>',
' <% for (var i=0; i < options.length; i++) {%>',
' <% var option = options[i]; %>',
' <option ',
' <% if (option.image) { %> data-image=<%=option.image%> <%}%>',
' <% if (option.connected) { %> data-connected=connected <%}%>',
' value=<%- formatter.fromRaw(option.value) %>',
' <% if (option.selected) {%>selected="selected"<%} else {%>',
' <% if (!select2.multiple && option.value === rawValue) {%>selected="selected"<%}%>',
' <% if (select2.multiple && rawValue && rawValue.indexOf(option.value) != -1){%>selected="selected" data-index="rawValue.indexOf(option.value)"<%}%>',
' <%}%>',
' <%= disabled ? "disabled" : ""%>><%-option.label%></option>',
' <%}%>',
' </select>',
' <% if (helpMessage && helpMessage.length) { %>',
' <span class="<%=Backform.helpMessageClassName%>"><%=helpMessage%></span>',
' <% } %>',
'</div>',
].join('\n')),
beforeSelect: function() {
var selVal = arguments[0].params.args.data.id;
if(this.field.get('connect') && this.$el.find('option[value="'+selVal+'"]').attr('data-connected') !== 'connected') {
this.field.get('connect').apply(this, [selVal, this.changeIcon.bind(this)]);
} else {
$(this.$sel).trigger('change');
setTimeout(function(){ this.onChange.apply(this); }.bind(this), 200);
}
},
changeIcon: function(data) {
let span = this.$el.find('.select2-selection .select2-selection__rendered span.wcTabIcon'),
selSpan = this.$el.find('option:selected');
if (span.hasClass('icon-server-not-connected')) {
let icon = (data.icon) ? data.icon : 'icon-pg';
span.removeClass('icon-server-not-connected');
span.addClass(icon);
span.attr('data-connected', 'connected');
selSpan.data().image = icon;
selSpan.attr('data-connected', 'connected');
this.onChange.apply(this);
}
else if (span.hasClass('icon-database-not-connected')) {
let icon = (data.icon) ? data.icon : 'pg-icon-database';
span.removeClass('icon-database-not-connected');
span.addClass(icon);
span.attr('data-connected', 'connected');
selSpan.removeClass('icon-database-not-connected');
selSpan.data().image = icon;
selSpan.attr('data-connected', 'connected');
this.onChange.apply(this);
}
},
onChange: function() {
var model = this.model,
attrArr = this.field.get('name').split('.'),
name = attrArr.shift(),
path = attrArr.join('.'),
value = this.getValueFromDOM(),
changes = {},
that = this;
if (this.model.errorModel instanceof Backbone.Model) {
if (_.isEmpty(path)) {
this.model.errorModel.unset(name);
} else {
var nestedError = this.model.errorModel.get(name);
if (nestedError) {
this.keyPathSetter(nestedError, path, null);
this.model.errorModel.set(name, nestedError);
}
}
}
changes[name] = _.isEmpty(path) ? value : _.clone(model.get(name)) || {};
if (!_.isEmpty(path)) that.keyPathSetter(changes[name], path, value);
that.stopListening(that.model, 'change:' + name, that.render);
model.set(changes);
that.listenTo(that.model, 'change:' + name, that.render);
},
render: function() {
/*
* Initialization from the original control.
*/
this.fetchData();
return Backform.Select2Control.prototype.render.apply(this, arguments);
},
fetchData: function() {
/*
* We're about to fetch the options required for this control.
*/
var self = this,
url = self.field.get('url'),
m = self.model;
url = _.isFunction(url) ? url.apply(m) : url;
if (url && self.field.get('deps')) {
url = url.replace('sid', m.get(self.field.get('deps')[0]));
}
// We found the url option.
// That means we need to fetch the options from that node.
if (url) {
var data;
m.trigger('pgadmin:view:fetching', m, self.field);
$.ajax({
async: false,
url: url,
})
.done(function(res) {
/*
* We will cache this data for a short period of time to avoid
* repeated calls for the same data.
*/
data = res.data;
})
.fail(function() {
m.trigger('pgadmin:view:fetch:error', m, self.field);
});
m.trigger('pgadmin:view:fetched', m, self.field);
// When fetching options from the cache we only need the options
// themselves, not the timestamp from the 'at' attribute.
//
/*
* Transform the data
*/
var transform = this.field.get('transform') || self.defaults.transform;
if (transform && _.isFunction(transform)) {
// We will transform the data later, when rendering.
// It will allow us to generate different data based on the
// dependencies.
self.field.set('options', transform.bind(self, data));
} else {
self.field.set('options', data);
}
}
},
});
let SchemaDiffHeaderView = Backform.Form.extend({
label: '',
className: function() {
return 'pg-el-sm-12 pg-el-md-12 pg-el-lg-12 pg-el-12';
},
tabPanelClassName: function() {
return Backform.tabClassName;
},
tabIndex: 0,
initialize: function(opts) {
this.label = opts.label;
Backform.Form.prototype.initialize.apply(this, arguments);
},
template: _.template(`
<div class="row pgadmin-control-group">
<div class="control-label">Select Source</div>
<div class="col-6 source row"></div>
</div>
<div class="row pgadmin-control-group">
<div class="control-label">Select Target</div>
<div class="col-6 target row"></div>
<div class="col-5 target-buttons">
<div class="action-btns d-flex">
<button class="btn btn-primary mr-auto"><i class="icon-schema-diff-white"></i>&nbsp;` + gettext('Compare') + `</button>
<button id="generate-script" class="btn btn-secondary mr-1" disabled><i class="fa fa-file-code-o sql-icon-lg"></i>&nbsp;` + gettext('Generate Script') + `</button>
<div class="btn-group mr-1" role="group" aria-label="">
<button id="btn-filter" type="button" class="btn btn-sm btn-secondary"
title=""
accesskey=""
tabindex="0">
<i class="fa fa-filter sql-icon-lg" aria-hidden="true"></i>&nbsp;` + gettext('Filter') + `
</button>
<button id="btn-filter-dropdown" type="button" class="btn btn-sm btn-secondary dropdown-toggle dropdown-toggle-split"
data-toggle="dropdown" aria-haspopup="true" aria-expanded="false"
title=""
accesskey=""
tabindex="0">
</button>` +
[
'<ul class="dropdown-menu filter">',
'<li>',
'<a class="dropdown-item" id="btn-identical" href="#" tabindex="0">',
'<i class="identical fa fa-check" aria-hidden="true"></i>',
'<span> ' + gettext('Identical') + ' </span>',
'</a>',
'</li>',
'<li>',
'<a class="dropdown-item" id="btn-differentt" href="#" tabindex="0">',
'<i class="different fa fa-check" aria-hidden="true"></i>',
'<span> ' + gettext('Different') + ' </span>',
'</a>',
'</li>',
'<li>',
'<a class="dropdown-item" id="btn-source-only" href="#" tabindex="0">',
'<i class="source-only fa fa-check" aria-hidden="true"></i>',
'<span> ' + gettext('Source Only') + ' </span>',
'</a>',
'</li>',
'<li>',
'<a class="dropdown-item" id="btn-target-only" href="#" tabindex="0">',
'<i class="target-only fa fa-check" aria-hidden="true"></i>',
'<span> ' + gettext('Target Only') + ' </span>',
'</a>',
'</li>',
'</ul>',
'</div>',
'</div>',
'</div>',
'</div>',
].join('\n')
),
render: function() {
this.cleanup();
var controls = this.controls,
m = this.model,
self = this,
idx = (this.tabIndex * 100);
this.$el.empty();
$(this.template()).appendTo(this.$el);
this.fields.each(function(f) {
var cntr = new(f.get('control'))({
field: f,
model: m,
dialog: self,
tabIndex: idx,
});
if (f.get('group') && f.get('group') == 'source') {
self.$el.find('.source').append(cntr.render().$el);
}
else {
self.$el.find('.target').append(cntr.render().$el);
}
controls.push(cntr);
});
return this;
},
remove: function(opts) {
if (opts && opts.data) {
if (this.model) {
if (this.model.reset) {
this.model.reset({
validate: false,
silent: true,
stop: true,
});
}
this.model.clear({
validate: false,
silent: true,
stop: true,
});
delete(this.model);
}
if (this.errorModel) {
this.errorModel.clear({
validate: false,
silent: true,
stop: true,
});
delete(this.errorModel);
}
}
this.cleanup();
Backform.Form.prototype.remove.apply(this, arguments);
},
});
let SchemaDiffFooterView = Backform.Form.extend({
className: function() {
return 'set-group pg-el-12';
},
tabPanelClassName: function() {
return Backform.tabClassName;
},
legendClass: 'badge',
contentClass: Backform.accordianContentClassName,
template: {
'content': _.template(`
<div class="pg-el-sm-12 row <%=contentClass%>">
<div class="pg-el-sm-4 ddl-source">Source</div>
<div class="pg-el-sm-4 ddl-target">Target</div>
<div class="pg-el-sm-4 ddl-diff">Difference
</div>
</div>
</div>
`),
},
initialize: function(opts) {
this.label = opts.label;
Backform.Form.prototype.initialize.apply(this, arguments);
},
render: function() {
this.cleanup();
let m = this.model,
$el = this.$el,
tmpl = this.template,
controls = this.controls,
data = {
'className': _.result(this, 'className'),
'legendClass': _.result(this, 'legendClass'),
'contentClass': _.result(this, 'contentClass'),
'collapse': _.result(this, 'collapse'),
},
idx = (this.tabIndex * 100);
this.$el.empty();
let el = $((tmpl['content'])(data)).appendTo($el);
this.fields.each(function(f) {
let cntr = new(f.get('control'))({
field: f,
model: m,
dialog: self,
tabIndex: idx,
name: f.get('name'),
});
if (f.get('group') && f.get('group') == 'ddl-source') {
el.find('.ddl-source').append(cntr.render().$el);
}
else if (f.get('group') && f.get('group') == 'ddl-target') {
el.find('.ddl-target').append(cntr.render().$el);
}
else {
el.find('.ddl-diff').append(cntr.render().$el);
}
controls.push(cntr);
});
let $diff_sc = this.$el.find('.source_ddl'),
$diff_tr = this.$el.find('.target_ddl'),
$diff = this.$el.find('.diff_ddl'),
footer_height = this.$el.parent().height() - 50;
$diff_sc.height(footer_height);
$diff_sc.css({
'height': footer_height + 'px',
});
$diff_tr.height(footer_height);
$diff_tr.css({
'height': footer_height + 'px',
});
$diff.height(footer_height);
$diff.css({
'height': footer_height + 'px',
});
return this;
},
});
export {
SchemaDiffSelect2Control,
SchemaDiffHeaderView,
SchemaDiffFooterView,
SchemaDiffSqlControl,
};

View File

@ -0,0 +1,145 @@
/////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2020, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////
define('pgadmin.schemadiff', [
'sources/gettext', 'sources/url_for', 'jquery', 'underscore',
'sources/pgadmin', 'sources/csrf', 'pgadmin.browser.node',
], function(
gettext, url_for, $, _, pgAdmin, csrfToken
) {
var wcDocker = window.wcDocker,
pgBrowser = pgAdmin.Browser;
/* Return if this module has already been loaded */
if (pgBrowser.SchemaDiff)
return pgBrowser.SchemaDiff;
// Create a SchemaDiff object on the pgBrowser class
pgBrowser.SchemaDiff = {
init: function() {
if (this.initialized)
return;
this.initialized = true;
csrfToken.setPGCSRFToken(pgAdmin.csrf_token_header, pgAdmin.csrf_token);
// Define the menus and where they should appear
var menus = [{
name: 'schema_diff',
module: this,
applies: ['tools'],
callback: 'show_schema_diff_tool',
priority: 1,
label: gettext('Schema Diff'),
enable: true,
}];
pgBrowser.add_menus(menus);
// Creating a new pgBrowser frame to show the data.
var schemaDiffFrameType = new pgBrowser.Frame({
name: 'frm_schemadiff',
showTitle: true,
isCloseable: true,
isPrivate: true,
url: 'about:blank',
});
let self = this;
/* Cache may take time to load for the first time
* Keep trying till available
*/
let cacheIntervalId = setInterval(function() {
if(pgBrowser.preference_version() > 0) {
self.preferences = pgBrowser.get_preferences_for_module('schema_diff');
clearInterval(cacheIntervalId);
}
},0);
pgBrowser.onPreferencesChange('schema_diff', function() {
self.preferences = pgBrowser.get_preferences_for_module('schema_diff');
});
// Load the newly created frame
schemaDiffFrameType.load(pgBrowser.docker);
return this;
},
// Callback to draw schema diff for objects
show_schema_diff_tool: function() {
var self = this,
baseUrl = url_for('schema_diff.initialize', null);
$.ajax({
url: baseUrl,
method: 'GET',
dataType: 'json',
contentType: 'application/json',
})
.done(function(res) {
self.trans_id = res.data.schemaDiffTransId;
res.data.panel_title = 'Schema Diff'; //TODO: Set the panel title
// TODO: The following call is used to test the fetching of the
// databases; it should be moved to the server selection event later.
self.launch_schema_diff(res.data);
})
.fail(function(xhr) {
self.raise_error_on_fail(gettext('Schema Diff initialize error') , xhr);
});
},
launch_schema_diff: function(data) {
var panel_title = data.panel_title,
trans_id = data.schemaDiffTransId,
panel_tooltip = '';
var url_params = {
'trans_id': trans_id,
'editor_title': panel_title,
},
baseUrl = url_for('schema_diff.panel', url_params);
if (this.preferences.schema_diff_new_browser_tab) {
window.open(baseUrl, '_blank');
} else {
var propertiesPanel = pgBrowser.docker.findPanels('properties'),
schemaDiffPanel = pgBrowser.docker.addPanel('frm_schemadiff', wcDocker.DOCK.STACKED, propertiesPanel[0]);
// Set panel title and icon
schemaDiffPanel.title('<span title="'+panel_tooltip+'">'+panel_title+'</span>');
schemaDiffPanel.icon('icon-schema-diff');
schemaDiffPanel.focus();
var openSchemaDiffURL = function(j) {
// add spinner element
$(j).data('embeddedFrame').$container.append(pgBrowser.SchemaDiff.spinner_el);
setTimeout(function() {
var frameInitialized = $(j).data('frameInitialized');
if (frameInitialized) {
var frame = $(j).data('embeddedFrame');
if (frame) {
frame.openURL(baseUrl);
frame.$container.find('.pg-sp-container').delay(1000).hide(1);
}
} else {
openSchemaDiffURL(j);
}
}, 100);
};
openSchemaDiffURL(schemaDiffPanel);
}
},
};
return pgBrowser.SchemaDiff;
});

View File

@ -0,0 +1,38 @@
/////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2020, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////
define([
'sources/url_for', 'jquery',
'sources/pgadmin', 'pgadmin.tools.schema_diff_ui',
], function(
url_for, $, pgAdmin, SchemaDiffUIModule
) {
var pgTools = pgAdmin.Tools = pgAdmin.Tools || {};
var SchemaDiffUI = SchemaDiffUIModule.default;
/* Return if this module has already been loaded */
if (pgTools.SchemaDiffHook)
return pgTools.SchemaDiffHook;
pgTools.SchemaDiffHook = {
load: function(trans_id) {
window.onbeforeunload = function() {
$.ajax({
url: url_for('schemadiff.index') + 'close/'+trans_id,
method: 'DELETE',
});
};
let schemaUi = new SchemaDiffUI($('#schema-diff-container'), trans_id);
schemaUi.render();
},
};
return pgTools.SchemaDiffHook;
});

View File

@ -0,0 +1,845 @@
/////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2020, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////
import url_for from 'sources/url_for';
import $ from 'jquery';
import gettext from 'sources/gettext';
import Alertify from 'pgadmin.alertifyjs';
import Backbone from 'backbone';
import Slick from 'sources/../bundle/slickgrid';
import pgAdmin from 'sources/pgadmin';
import {setPGCSRFToken} from 'sources/csrf';
import {generateScript} from 'tools/datagrid/static/js/show_query_tool';
import 'pgadmin.sqleditor';
import pgWindow from 'sources/window';
import {SchemaDiffSelect2Control, SchemaDiffHeaderView,
SchemaDiffFooterView, SchemaDiffSqlControl} from './schema_diff.backform';
var wcDocker = window.wcDocker;
export default class SchemaDiffUI {
constructor(container, trans_id) {
var self = this;
this.$container = container;
this.header = null;
this.trans_id = trans_id;
this.filters = ['Identical', 'Different', 'Source Only', 'Target Only'];
this.sel_filters = ['Identical', 'Different', 'Source Only', 'Target Only'];
this.dataView = null;
this.grid = null;
this.selection = {};
this.model = new Backbone.Model({
source_sid: undefined,
source_did: undefined,
source_scid: undefined,
target_sid: undefined,
target_did: undefined,
target_scid: undefined,
source_ddl: undefined,
target_ddl: undefined,
diff_ddl: undefined,
});
setPGCSRFToken(pgAdmin.csrf_token_header, pgAdmin.csrf_token);
this.docker = new wcDocker(
this.$container, {
allowContextMenu: false,
allowCollapse: false,
loadingClass: 'pg-sp-icon',
themePath: url_for('static', {
'filename': 'css',
}),
theme: 'webcabin.overrides.css',
}
);
this.header_panel = new pgAdmin.Browser.Panel({
name: 'schema_diff_header_panel',
showTitle: false,
isCloseable: false,
isPrivate: true,
content: '<div id="schema-diff-header" class="pg-el-container" el="sm"></div><div id="schema-diff-grid" class="pg-el-container" el="sm"></div>',
elContainer: true,
});
this.footer_panel = new pgAdmin.Browser.Panel({
name: 'schema_diff_footer_panel',
title: gettext('DDL Comparison'),
isCloseable: false,
isPrivate: true,
height: '60',
content: `<div id="schema-diff-ddl-comp" class="pg-el-container" el="sm">
<div id="ddl_comp_fetching_data" class="pg-sp-container schema-diff-busy-fetching d-none">
<div class="pg-sp-content">
<div class="row">
<div class="col-12 pg-sp-icon"></div>
</div>
<div class="row"><div class="col-12 pg-sp-text">` + gettext('Comparing objects...') + `</div></div>
</div>
</div></div>`,
});
this.header_panel.load(this.docker);
this.footer_panel.load(this.docker);
this.panel_obj = this.docker.addPanel('schema_diff_header_panel', wcDocker.DOCK.TOP, {w:'95%', h:'50%'});
this.footer_panel_obj = this.docker.addPanel('schema_diff_footer_panel', wcDocker.DOCK.BOTTOM, this.panel_obj, {w:'95%', h:'50%'});
self.footer_panel_obj.on(wcDocker.EVENT.VISIBILITY_CHANGED, function() {
setTimeout(function() {
this.resize_grid();
}.bind(self), 200);
});
self.footer_panel_obj.on(wcDocker.EVENT.RESIZE_ENDED, function() {
setTimeout(function() {
this.resize_panels();
}.bind(self), 200);
});
}
raise_error_on_fail(alert_title, xhr) {
try {
var err = JSON.parse(xhr.responseText);
Alertify.alert(alert_title, err.errormsg);
} catch (e) {
Alertify.alert(alert_title, e.statusText);
}
}
resize_panels() {
let $src_ddl = $('#schema-diff-ddl-comp .source_ddl'),
$tar_ddl = $('#schema-diff-ddl-comp .target_ddl'),
$diff_ddl = $('#schema-diff-ddl-comp .diff_ddl'),
footer_height = $('#schema-diff-ddl-comp').height() - 50;
$src_ddl.height(footer_height);
$src_ddl.css({
'height': footer_height + 'px',
});
$tar_ddl.height(footer_height);
$tar_ddl.css({
'height': footer_height + 'px',
});
$diff_ddl.height(footer_height);
$diff_ddl.css({
'height': footer_height + 'px',
});
this.resize_grid();
}
compare_schemas() {
var self = this,
url_params = self.model.toJSON();
if (url_params['source_sid'] == '' || _.isUndefined(url_params['source_sid']) ||
url_params['source_did'] == '' || _.isUndefined(url_params['source_did']) ||
url_params['source_scid'] == '' || _.isUndefined(url_params['source_scid']) ||
url_params['target_sid'] == '' || _.isUndefined(url_params['target_sid']) ||
url_params['target_did'] == '' || _.isUndefined(url_params['target_did']) ||
url_params['target_scid'] == '' || _.isUndefined(url_params['target_scid'])
) {
Alertify.alert(gettext('Selection Error'), gettext('Please select source and target.'));
return false;
}
this.selection = JSON.parse(JSON.stringify(url_params));
url_params['trans_id'] = self.trans_id;
_.each(url_params, function(key, val) {
url_params[key] = parseInt(val, 10);
});
var baseUrl = url_for('schema_diff.compare', url_params);
self.model.set({
'source_ddl': undefined,
'target_ddl': undefined,
'diff_ddl': undefined,
});
self.render_grid([]);
self.footer.render();
self.startDiffPoller();
return $.ajax({
url: baseUrl,
method: 'GET',
dataType: 'json',
contentType: 'application/json',
})
.done(function (res) {
self.stopDiffPoller();
self.render_grid(res.data);
})
.fail(function (xhr) {
self.raise_error_on_fail(gettext('Schema compare error'), xhr);
self.stopDiffPoller();
});
}
generate_script() {
var self = this,
baseServerUrl = url_for('schema_diff.get_server', {'sid': self.selection['target_sid'],
'did': self.selection['target_did']}),
sel_rows = self.grid ? self.grid.getSelectedRows() : [],
sel_rows_data = [],
url_params = self.selection,
generated_script = undefined,
open_query_tool;
_.each(url_params, function(key, val) {
url_params[key] = parseInt(val, 10);
});
$('#diff_fetching_data').removeClass('d-none');
$('#diff_fetching_data').find('.schema-diff-busy-text').text('Generating script...');
open_query_tool = function get_server_details() {
$.ajax({
url: baseServerUrl,
method: 'GET',
dataType: 'json',
contentType: 'application/json',
})
.done(function (res) {
let data = res.data;
let server_data = {};
if (data) {
server_data['sgid'] = data.gid;
server_data['sid'] = data.sid;
server_data['stype'] = data.type;
server_data['server'] = data.name;
server_data['user'] = data.user;
server_data['did'] = self.model.get('target_did');
server_data['database'] = data.database;
if (_.isUndefined(generated_script))
generated_script = 'BEGIN;' + '\n' + self.model.get('diff_ddl') + '\n' + 'END;';
let preferences = pgWindow.pgAdmin.Browser.get_preferences_for_module('schema_diff');
if (preferences.schema_diff_new_browser_tab) {
pgWindow.pgAdmin.ddl_diff = generated_script;
generateScript(server_data, pgWindow.pgAdmin.DataGrid);
} else {
pgWindow.pgAdmin.ddl_diff = generated_script;
generateScript(server_data, pgWindow.pgAdmin.DataGrid);
}
}
$('#diff_fetching_data').find('.schema-diff-busy-text').text('');
$('#diff_fetching_data').addClass('d-none');
})
.fail(function (xhr) {
self.raise_error_on_fail(gettext('Generate script error'), xhr);
$('#diff_fetching_data').find('.schema-diff-busy-text').text('');
$('#diff_fetching_data').addClass('d-none');
});
};
if (sel_rows.length > 0) {
for (var row = 0; row < sel_rows.length; row++) {
let data = self.grid.getData().getItem(sel_rows[row]);
if (data.type) {
let tmp_data = {
'node_type': data.type,
'source_oid': parseInt(data.oid, 10),
'target_oid': parseInt(data.oid, 10),
'comp_status': data.status,
};
if(data.status && (data.status.toLowerCase() == 'different' || data.status.toLowerCase() == 'identical')) {
tmp_data['target_oid'] = data.target_oid;
}
sel_rows_data.push(tmp_data);
}
}
url_params['sel_rows'] = sel_rows_data;
let baseUrl = url_for('schema_diff.generate_script', {'trans_id': self.trans_id});
$.ajax({
url: baseUrl,
method: 'POST',
dataType: 'json',
contentType: 'application/json',
data: JSON.stringify(url_params),
})
.done(function (res) {
if (res) {
generated_script = 'BEGIN;' + '\n' + res.diff_ddl + '\n' + 'END;';
}
open_query_tool();
})
.fail(function (xhr) {
self.raise_error_on_fail(gettext('Generate script error'), xhr);
$('#diff_fetching_data').addClass('d-none');
});
} else if (!_.isUndefined(self.model.get('diff_ddl'))) {
open_query_tool();
}
return false;
}
render_grid(data) {
var self = this;
var grid;
if (self.grid) {
// Only render the data
self.render_grid_data(data);
return;
}
// Checkbox Column
var checkboxSelector = new Slick.CheckboxSelectColumn({
cssClass: 'slick-cell-checkboxsel',
minWidth: 30,
});
// Format Schema object title with appropriate icon
var formatColumnTitle = function (row, cell, value, columnDef, dataContext) {
let icon = 'icon-' + dataContext.type;
return '<i class="ml-5 wcTabIcon '+ icon +'"></i><span>' + value + '</span>';
};
// Grid Columns
var grid_width = (self.grid_width - 47) / 2 ;
var columns = [
checkboxSelector.getColumnDefinition(),
{id: 'title', name: 'Schema Objects', field: 'title', minWidth: grid_width, formatter: formatColumnTitle},
{id: 'status', name: 'Comparison Result', field: 'status', minWidth: grid_width},
{id: 'label', name: 'Schema Objects', field: 'label', width: 0, minWidth: 0, maxWidth: 0,
cssClass: 'really-hidden', headerCssClass: 'really-hidden'},
{id: 'type', name: 'Schema Objects', field: 'type', width: 0, minWidth: 0, maxWidth: 0,
cssClass: 'really-hidden', headerCssClass: 'really-hidden'},
{id: 'id', name: 'id', field: 'id', width: 0, minWidth: 0, maxWidth: 0,
cssClass: 'really-hidden', headerCssClass: 'really-hidden' },
];
// Grid Options
var options = {
enableCellNavigation: true,
enableColumnReorder: false,
enableRowSelection: true,
};
// Grouping by Schema Object
self.groupBySchemaObject = function() {
self.dataView.setGrouping({
getter: 'type',
formatter: function (g) {
let icon = 'icon-coll-' + g.value;
return '<i class="wcTabIcon '+ icon +'"></i><span>' + g.rows[0].label + '</span>';
},
aggregateCollapsed: true,
lazyTotalsCalculation: true,
});
};
var groupItemMetadataProvider = new Slick.Data.GroupItemMetadataProvider({ checkboxSelect: true,
checkboxSelectPlugin: checkboxSelector });
// Dataview for grid
self.dataView = new Slick.Data.DataView({
groupItemMetadataProvider: groupItemMetadataProvider,
inlineFilters: false,
});
// Wire up model events to drive the grid
self.dataView.onRowCountChanged.subscribe(function () {
grid.updateRowCount();
grid.render();
});
self.dataView.onRowsChanged.subscribe(function (e, args) {
grid.invalidateRows(args.rows);
grid.render();
});
// Change Row css on the basis of item status
self.dataView.getItemMetadata = function(row) {
var item = self.dataView.getItem(row);
if (item.__group) {
return groupItemMetadataProvider.getGroupRowMetadata(item);
}
if(item.status === 'Different') {
return { cssClasses: 'different' };
} else if (item.status === 'Source Only') {
return { cssClasses: 'source' };
} else if (item.status === 'Target Only') {
return { cssClasses: 'target' };
}
return null;
};
// Grid filter
self.filter = function (item) {
let self = this;
if (self.sel_filters.indexOf(item.status) !== -1) return true;
return false;
};
let $data_grid = $('#schema-diff-grid');
grid = this.grid = new Slick.Grid($data_grid, self.dataView, columns, options);
grid.registerPlugin(groupItemMetadataProvider);
grid.setSelectionModel(new Slick.RowSelectionModel({selectActiveRow: false}));
grid.registerPlugin(checkboxSelector);
grid.onClick.subscribe(function(e, args) {
if (args.row) {
data = args.grid.getData().getItem(args.row);
if (data.status) this.ddlCompare(data);
}
}.bind(self));
grid.onSelectedRowsChanged.subscribe(self.handle_generate_button.bind(self));
self.model.on('change:diff_ddl', self.handle_generate_button.bind(self));
$('#schema-diff-grid').on('keyup', function() {
if ((event.keyCode == 38 || event.keyCode ==40) && this.grid.getActiveCell().row) {
data = this.grid.getData().getItem(this.grid.getActiveCell().row);
this.ddlCompare(data);
}
}.bind(self));
self.render_grid_data(data);
}
render_grid_data(data) {
var self = this;
self.dataView.beginUpdate();
self.dataView.setItems(data);
self.dataView.setFilter(self.filter.bind(self));
self.groupBySchemaObject();
self.dataView.endUpdate();
self.resize_grid();
}
handle_generate_button(){
if (this.grid.getSelectedRows().length > 0 || (this.model.get('diff_ddl') != '' && !_.isUndefined(this.model.get('diff_ddl')))) {
this.header.$el.find('button#generate-script').removeAttr('disabled');
} else {
this.header.$el.find('button#generate-script').attr('disabled', true);
}
}
resize_grid() {
let $data_grid = $('#schema-diff-grid'),
grid_height = (this.panel_obj.height() > 0) ? this.panel_obj.height() - 100 : this.grid_height - 100;
$data_grid.height(grid_height);
$data_grid.css({
'height': grid_height + 'px',
});
if (this.grid) this.grid.resizeCanvas();
}
getCompareStatus() {
var self = this,
url_params = {'trans_id': self.trans_id},
baseUrl = url_for('schema_diff.poll', url_params);
$.ajax({
url: baseUrl,
method: 'GET',
dataType: 'json',
contentType: 'application/json',
})
.done(function (res) {
let msg = res.data.compare_msg + res.data.diff_percentage + '% completed';
$('#diff_fetching_data').find('.schema-diff-busy-text').text(msg);
})
.fail(function (xhr) {
self.raise_error_on_fail(gettext('Poll error'), xhr);
self.stopDiffPoller('fail');
});
}
startDiffPoller() {
$('#ddl_comp_fetching_data').addClass('d-none');
$('#diff_fetching_data').removeClass('d-none');
/* Execute once for the first time as setInterval will not do */
this.getCompareStatus();
this.diff_poller_int_id = setInterval(this.getCompareStatus.bind(this), 1000);
}
stopDiffPoller(status) {
clearInterval(this.diff_poller_int_id);
// The last polling for comparison
if (status !== 'fail') this.getCompareStatus();
$('#diff_fetching_data').find('.schema-diff-busy-text').text('');
$('#diff_fetching_data').addClass('d-none');
}
ddlCompare(data) {
var self = this,
node_type = data.type,
source_oid = data.oid,
target_oid = data.oid;
self.model.set({
'source_ddl': undefined,
'target_ddl': undefined,
'diff_ddl': undefined,
});
var url_params = self.selection;
if(data.status && (data.status.toLowerCase() == 'different' || data.status.toLowerCase() == 'identical')) {
target_oid = data.target_oid;
}
url_params['trans_id'] = self.trans_id;
url_params['source_oid'] = source_oid;
url_params['target_oid'] = target_oid;
url_params['comp_status'] = data.status;
url_params['node_type'] = node_type;
_.each(url_params, function(key, val) {
url_params[key] = parseInt(val, 10);
});
$('#ddl_comp_fetching_data').removeClass('d-none');
var baseUrl = url_for('schema_diff.ddl_compare', url_params);
self.model.url = baseUrl;
self.model.fetch({
success: function() {
self.footer.render();
$('#ddl_comp_fetching_data').addClass('d-none');
},
error: function() {
self.footer.render();
$('#ddl_comp_fetching_data').addClass('d-none');
},
});
}
render() {
let self = this;
let panel = self.docker.findPanels('schema_diff_header_panel')[0];
var header = panel.$container.find('#schema-diff-header');
self.header = new SchemaDiffHeaderView({
el: header,
model: this.model,
fields: [{
name: 'source_sid', label: false,
control: SchemaDiffSelect2Control,
url: url_for('schema_diff.servers'),
select2: {
allowClear: true,
placeholder: gettext('Select server...'),
},
connect: function() {
self.connect_server(arguments[0], arguments[1]);
},
group: 'source',
disabled: function() {
return false;
},
}, {
name: 'source_did',
group: 'source',
deps: ['source_sid'],
control: SchemaDiffSelect2Control,
url: function() {
if (this.get('source_sid'))
return url_for('schema_diff.databases', {'sid': this.get('source_sid')});
return false;
},
select2: {
allowClear: true,
placeholder: gettext('Select database...'),
},
disabled: function(m) {
if (!_.isUndefined(m.get('source_sid')) && !_.isNull(m.get('source_sid')))
return false;
return true;
},
connect: function() {
self.connect_database(this.model.get('source_sid'), arguments[0], arguments[1]);
},
}, {
name: 'source_scid',
control: SchemaDiffSelect2Control,
group: 'source',
deps: ['source_sid', 'source_did'],
url: function() {
if (this.get('source_sid') && this.get('source_did'))
return url_for('schema_diff.schemas', {'sid': this.get('source_sid'), 'did': this.get('source_did')});
return false;
},
select2: {
allowClear: true,
placeholder: gettext('Select schema...'),
},
disabled: function(m) {
if (!_.isUndefined(m.get('source_did')) && !_.isNull(m.get('source_did')))
return false;
return true;
},
}, {
name: 'target_sid', label: false,
control: SchemaDiffSelect2Control,
group: 'target',
url: url_for('schema_diff.servers'),
select2: {
allowClear: true,
placeholder: gettext('Select server...'),
},
disabled: function() {
return false;
},
connect: function() {
self.connect_server(arguments[0], arguments[1]);
},
}, {
name: 'target_did',
control: SchemaDiffSelect2Control,
group: 'target',
deps: ['target_sid'],
url: function() {
if (this.get('target_sid'))
return url_for('schema_diff.databases', {'sid': this.get('target_sid')});
return false;
},
select2: {
allowClear: true,
placeholder: gettext('Select database...'),
},
disabled: function(m) {
if (!_.isUndefined(m.get('target_sid')) && !_.isNull(m.get('target_sid')))
return false;
return true;
},
connect: function() {
self.connect_database(this.model.get('target_sid'), arguments[0], arguments[1]);
},
}, {
name: 'target_scid',
control: SchemaDiffSelect2Control,
group: 'target',
deps: ['target_sid', 'target_did'],
url: function() {
if (this.get('target_sid') && this.get('target_did'))
return url_for('schema_diff.schemas', {'sid': this.get('target_sid'), 'did': this.get('target_did')});
return false;
},
select2: {
allowClear: true,
placeholder: gettext('Select schema...'),
},
disabled: function(m) {
if (!_.isUndefined(m.get('target_did')) && !_.isNull(m.get('target_did')))
return false;
return true;
},
}],
});
self.footer = new SchemaDiffFooterView({
model: this.model,
fields: [{
name: 'source_ddl', label: false,
control: SchemaDiffSqlControl,
group: 'ddl-source',
}, {
name: 'target_ddl', label: false,
control: SchemaDiffSqlControl,
group: 'ddl-target',
}, {
name: 'diff_ddl', label: false,
control: SchemaDiffSqlControl,
group: 'ddl-diff', copyRequired: true,
}],
});
self.header.render();
self.header.$el.find('button.btn-primary').on('click', self.compare_schemas.bind(self));
self.header.$el.find('button#generate-script').on('click', self.generate_script.bind(self));
self.header.$el.find('ul.filter a.dropdown-item').on('click', self.refresh_filters.bind(self));
let footer_panel = self.docker.findPanels('schema_diff_footer_panel')[0],
header_panel = self.docker.findPanels('schema_diff_header_panel')[0];
footer_panel.$container.find('#schema-diff-ddl-comp').append(self.footer.render().$el);
header_panel.$container.find('#schema-diff-grid').append(`<div class='obj_properties container-fluid'>
<div class='alert alert-info pg-panel-message'>` + gettext('Select the server, database and schema for the source and target and click <b>Compare</b> to compare them.') + '</div></div>');
self.grid_width = $('#schema-diff-grid').width();
self.grid_height = this.panel_obj.height();
}
refresh_filters(event) {
let self = this;
_.each(self.filters, function(filter) {
let index = self.sel_filters.indexOf(filter);
let filter_class = '.' + filter.replace(' ', '-').toLowerCase();
if ($(event.currentTarget).find(filter_class).length == 1) {
if ($(filter_class).hasClass('visibility-hidden') === true) {
$(filter_class).removeClass('visibility-hidden');
if (index === -1) self.sel_filters.push(filter);
} else {
$(filter_class).addClass('visibility-hidden');
if(index !== -1 ) delete self.sel_filters[index];
}
}
});
// Refresh the grid
self.dataView.refresh();
}
connect_database(server_id, db_id, callback) {
var url = url_for('schema_diff.connect_database', {'sid': server_id, 'did': db_id});
$.post(url)
.done(function(res) {
if (res.success && res.data) {
callback(res.data);
}
})
.fail(function(xhr, error) {
Alertify.pgNotifier(error, xhr, gettext('Failed to connect the database.'));
});
}
connect_server(server_id, callback) {
var onFailure = function(
xhr, status, error, server_id, callback
) {
Alertify.pgNotifier('error', xhr, error, function(msg) {
setTimeout(function() {
Alertify.dlgServerPass(
gettext('Connect to Server'),
msg,
server_id,
callback
).resizeTo();
}, 100);
});
},
onSuccess = function(res, callback) {
if (res && res.data) {
// We're not reconnecting
callback(res.data);
}
};
// Ask for the password and send it back to connect to the server
if (!Alertify.dlgServerPass) {
Alertify.dialog('dlgServerPass', function factory() {
return {
main: function(
title, message, server_id, success_callback, _onSuccess, _onFailure, _onCancel
) {
this.set('title', title);
this.message = message;
this.server_id = server_id;
this.success_callback = success_callback;
this.onSuccess = _onSuccess || onSuccess;
this.onFailure = _onFailure || onFailure;
this.onCancel = _onCancel || onCancel;
},
setup:function() {
return {
buttons:[{
text: gettext('Cancel'), className: 'btn btn-secondary fa fa-times pg-alertify-button',
key: 27,
},{
text: gettext('OK'), key: 13, className: 'btn btn-primary fa fa-check pg-alertify-button',
}],
focus: {element: '#password', select: true},
options: {
modal: 0, resizable: false, maximizable: false, pinnable: false,
},
};
},
build:function() {},
prepare:function() {
this.setContent(this.message);
},
callback: function(closeEvent) {
var _onFailure = this.onFailure,
_onSuccess = this.onSuccess,
_onCancel = this.onCancel,
_success_callback = this.success_callback;
if (closeEvent.button.text == gettext('OK')) {
var _url = url_for('schema_diff.connect_server', {'sid': this.server_id});
$.ajax({
type: 'POST',
timeout: 30000,
url: _url,
data: $('#frmPassword').serialize(),
})
.done(function(res) {
if (res.success == 1) {
return _onSuccess(res, _success_callback);
}
})
.fail(function(xhr, status, error) {
return _onFailure(
xhr, status, error, _server_id, _success_callback
);
});
} else {
_onCancel && typeof(_onCancel) == 'function' &&
_onCancel();
}
},
};
});
}
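// Default cancel handler for the password dialog: abort the connection attempt.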
var onCancel = function() {
return false;
};
var url = url_for('schema_diff.connect_server', {'sid': server_id});
$.post(url)
.done(function(res) {
if (res.success == 1) {
return onSuccess(res, callback);
}
})
.fail(function(xhr, status, error) {
return onFailure(
xhr, status, error, server_id, callback
);
});
}
}

View File

@ -0,0 +1,85 @@
#schema-diff-container {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
padding-top: 10px;
background-color: $color-gray-light;
}
#schema-diff-grid {
background: $color-bg;
outline: 0;
font-size: 9pt;
margin-top: 28px;
background: none;
background-color: $color-gray-light;
}
#schema-diff-grid .slick-header .slick-header-columns {
background: $color-bg;
height: 40px;
border-bottom: $panel-border;
}
#schema-diff-grid .slick-header .slick-header-column.ui-state-default {
padding: 4px 0 3px 6px;
border-bottom: $panel-border;
border-right: $panel-border;
}
.slick-row:hover .slick-cell{
border-top: $table-hover-border;
border-bottom: $table-hover-border;
background-color: $table-hover-bg-color;
}
#schema-diff-grid .slick-header .slick-header-column.selected {
background-color: $color-primary;
color: $color-primary-fg;
}
.slick-row .slick-cell {
border-bottom: $panel-border;
border-right: $panel-border;
z-index: 0;
}
#schema-diff-grid .slick-row .slick-cell.l0.r0.selected {
background-color: $color-primary;
color: $color-primary-fg;
}
#schema-diff-grid .slick-row > .slick-cell:not(.l0):not(.r0).selected {
background-color: $table-hover-bg-color;
border-top: $table-hover-border;
border-bottom: $table-hover-border;
}
#schema-diff-grid div.slick-header.ui-state-default {
background: $color-bg;
border-bottom: none;
border-right: none;
border-top: none;
}
#schema-diff-grid .different {
background-color: $schemadiff-diff-row-color !important;
}
#schema-diff-grid .source {
background-color: $schemadiff-source-row-color !important;
}
#schema-diff-grid .target {
background-color: $schemadiff-target-row-color !important;
}
#schema-diff-grid .slick-row.active {
background-color: $table-bg-selected !important;
}
#schema-diff-ddl-comp {
height: 100%;
bottom: 10px;
background-color: $color-bg !important;
overflow-y: hidden;
}

View File

@ -0,0 +1,31 @@
{% extends "base.html" %}
{% block init_script %}
try {
require(
['sources/generated/schema_diff', 'sources/generated/slickgrid', 'sources/generated/codemirror', 'sources/generated/browser_nodes'],
function(pgSchemaDiffHook) {
var pgSchemaDiffHook = pgSchemaDiffHook || pgAdmin.Tools.SchemaDiffHook;
pgSchemaDiffHook.load({{trans_id}});
},
function() {
console.log(arguments);
});
} catch (err) {
console.log(err);
}
{% endblock %}
{% block css_link %}
<link type="text/css" rel="stylesheet" href="{{ url_for('browser.browser_css')}}"/>
{% endblock %}
{% block body %}
<div id="schema-diff-container">
<div id="diff_fetching_data" class="pg-sp-container schema-diff-busy-fetching d-none">
<div class="pg-sp-content">
<div class="row">
<div class="col-12 pg-sp-icon"></div>
</div>
<div class="row"><div class="col-12 pg-sp-text schema-diff-busy-text"></div></div>
</div>
</div>
</div>
{% endblock %}

View File

@ -0,0 +1,440 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:54:15 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- TOC entry 17 (class 2615 OID 139770)
-- Name: source; Type: SCHEMA; Schema: -; Owner: postgres
--
CREATE SCHEMA source;
ALTER SCHEMA source OWNER TO postgres;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA source;
--
-- TOC entry 12272 (class 1259 OID 149205)
-- Name: table_for_partition; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_partition (
col1 bigint NOT NULL
)
PARTITION BY RANGE (col1);
ALTER TABLE source.table_for_partition OWNER TO postgres;
--
-- TOC entry 12273 (class 1259 OID 149208)
-- Name: part1; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.part1 (
col1 bigint NOT NULL
);
ALTER TABLE ONLY source.table_for_partition ATTACH PARTITION source.part1 FOR VALUES FROM ('1') TO ('23');
ALTER TABLE source.part1 OWNER TO postgres;
--
-- TOC entry 12274 (class 1259 OID 149213)
-- Name: table_for_partition_1; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_partition_1 (
col1 bigint
)
PARTITION BY RANGE (col1);
ALTER TABLE source.table_for_partition_1 OWNER TO postgres;
--
-- TOC entry 12275 (class 1259 OID 149216)
-- Name: part3; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.part3 (
col1 bigint
);
ALTER TABLE ONLY source.table_for_partition_1 ATTACH PARTITION source.part3 FOR VALUES FROM ('1') TO ('10');
ALTER TABLE source.part3 OWNER TO postgres;
--
-- TOC entry 12276 (class 1259 OID 149219)
-- Name: part4; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.part4 (
col1 bigint
);
ALTER TABLE ONLY source.table_for_partition_1 ATTACH PARTITION source.part4 FOR VALUES FROM ('11') TO ('20');
ALTER TABLE source.part4 OWNER TO postgres;
--
-- TOC entry 12258 (class 1259 OID 148963)
-- Name: table_for_column; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_column (
col1 bigint NOT NULL,
col2 text,
col3 text
);
ALTER TABLE source.table_for_column OWNER TO postgres;
--
-- TOC entry 12256 (class 1259 OID 148895)
-- Name: table_for_constraints; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_constraints (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_constraints OWNER TO postgres;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12256
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON TABLE source.table_for_constraints IS 'comments';
--
-- TOC entry 12262 (class 1259 OID 149004)
-- Name: table_for_identical; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_identical OWNER TO postgres;
--
-- TOC entry 12260 (class 1259 OID 148977)
-- Name: table_for_index; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_index OWNER TO postgres;
--
-- TOC entry 12269 (class 1259 OID 149128)
-- Name: table_for_primary_key; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE source.table_for_primary_key OWNER TO postgres;
--
-- TOC entry 12264 (class 1259 OID 149024)
-- Name: table_for_rule; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_rule OWNER TO postgres;
--
-- TOC entry 12266 (class 1259 OID 149048)
-- Name: table_for_trigger; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_trigger OWNER TO postgres;
--
-- TOC entry 56893 (class 2606 OID 148904)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='12') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 56893
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT "Exclusion" ON source.table_for_constraints IS 'comments';
--
-- TOC entry 56891 (class 2606 OID 148911)
-- Name: table_for_constraints check_con; Type: CHECK CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE source.table_for_constraints
ADD CONSTRAINT check_con CHECK ((col1 > 10)) NOT VALID;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56891
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT check_con ON source.table_for_constraints IS 'coment';
--
-- TOC entry 56899 (class 2606 OID 148970)
-- Name: table_for_column table_for_column_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_column
ADD CONSTRAINT table_for_column_pkey PRIMARY KEY (col1);
--
-- TOC entry 56895 (class 2606 OID 148902)
-- Name: table_for_constraints table_for_constraints_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT table_for_constraints_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 148984)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56913 (class 2606 OID 149135)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1, col2);
--
-- TOC entry 56909 (class 2606 OID 149031)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56907 (class 2606 OID 149011)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56911 (class 2606 OID 149055)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148913)
-- Name: table_for_constraints unique; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "unique" UNIQUE (col1);
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 56897
-- Name: CONSTRAINT "unique" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT "unique" ON source.table_for_constraints IS 'cmnt';
--
-- TOC entry 56900 (class 1259 OID 149023)
-- Name: index1; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index1 ON source.table_for_index USING btree (col2 varchar_pattern_ops);
--
-- TOC entry 56905 (class 1259 OID 149012)
-- Name: index_identical; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_identical ON source.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56901 (class 1259 OID 149211)
-- Name: index_same; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_same ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56902 (class 1259 OID 149022)
-- Name: index_source; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_source ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 61044 (class 2618 OID 149032)
-- Name: table_for_rule rule1; Type: RULE; Schema: source; Owner: postgres
--
CREATE RULE rule1 AS
ON UPDATE TO source.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61070 (class 0 OID 0)
-- Dependencies: 61044
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON RULE rule1 ON source.table_for_rule IS 'comments';
--
-- TOC entry 61045 (class 2618 OID 149033)
-- Name: table_for_rule rule2; Type: RULE; Schema: source; Owner: postgres
--
CREATE RULE rule2 AS
ON INSERT TO source.table_for_rule DO NOTHING;
--
-- TOC entry 12283 (class 1259 OID 347818)
-- Name: test view; Type: VIEW; Schema: source; Owner: postgres
--
CREATE VIEW source."test view" AS
SELECT pg_class.relname,
pg_class.relnamespace,
pg_class.reltype,
pg_class.reloftype,
pg_class.relowner,
pg_class.relam,
pg_class.relfilenode,
pg_class.reltablespace,
pg_class.relpages,
pg_class.reltuples,
pg_class.relallvisible,
pg_class.reltoastrelid,
pg_class.relhasindex,
pg_class.relisshared,
pg_class.relpersistence,
pg_class.relkind,
pg_class.relnatts,
pg_class.relchecks,
pg_class.relhasoids,
pg_class.relhaspkey,
pg_class.relhasrules,
pg_class.relhastriggers,
pg_class.relhassubclass,
pg_class.relrowsecurity,
pg_class.relforcerowsecurity,
pg_class.relispopulated,
pg_class.relreplident,
pg_class.relispartition,
pg_class.relfrozenxid,
pg_class.relminmxid,
pg_class.relacl,
pg_class.reloptions,
pg_class.relpartbound
FROM pg_class
LIMIT 10;
ALTER TABLE source."test view" OWNER TO postgres;
--
-- TOC entry 12286 (class 1259 OID 347832)
-- Name: test view f; Type: VIEW; Schema: source; Owner: postgres
--
CREATE VIEW source."test view f" WITH (security_barrier='false') AS
SELECT 2;
ALTER TABLE source."test view f" OWNER TO postgres;
--
-- TOC entry 61111 (class 0 OID 0)
-- Dependencies: 12286
-- Name: VIEW "test view f"; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON VIEW source."test view f" IS 'cmn';

View File

@ -0,0 +1,429 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:55:22 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- TOC entry 18 (class 2615 OID 139771)
-- Name: target; Type: SCHEMA; Schema: -; Owner: postgres
--
CREATE SCHEMA target;
ALTER SCHEMA target OWNER TO postgres;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA target;
--
-- TOC entry 12250 (class 1259 OID 139938)
-- Name: MView; Type: MATERIALIZED VIEW; Schema: target; Owner: postgres
--
CREATE MATERIALIZED VIEW target."MView" AS
SELECT 'tekst'::text AS text
WITH NO DATA;
ALTER TABLE target."MView" OWNER TO postgres;
--
-- TOC entry 12277 (class 1259 OID 149234)
-- Name: table_for_partition_1; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_partition_1 (
col1 bigint
)
PARTITION BY RANGE (col1);
ALTER TABLE target.table_for_partition_1 OWNER TO postgres;
--
-- TOC entry 12278 (class 1259 OID 149237)
-- Name: part3; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.part3 (
col1 bigint
);
ALTER TABLE ONLY target.table_for_partition_1 ATTACH PARTITION target.part3 FOR VALUES FROM ('13') TO ('56');
ALTER TABLE target.part3 OWNER TO postgres;
--
-- TOC entry 12259 (class 1259 OID 148971)
-- Name: table_for_column; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_column (
col1 bigint,
col2 bigint,
col4 text
);
ALTER TABLE target.table_for_column OWNER TO postgres;
--
-- TOC entry 12268 (class 1259 OID 149089)
-- Name: table_for_constraints; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_constraints (
col1 integer NOT NULL,
col2 text,
CONSTRAINT check_con CHECK ((col1 > 30))
);
ALTER TABLE target.table_for_constraints OWNER TO postgres;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12268
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON TABLE target.table_for_constraints IS 'comments';
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 12268
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON CONSTRAINT check_con ON target.table_for_constraints IS 'coment';
--
-- TOC entry 12257 (class 1259 OID 148960)
-- Name: table_for_del; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_del (
);
ALTER TABLE target.table_for_del OWNER TO postgres;
--
-- TOC entry 12271 (class 1259 OID 149172)
-- Name: table_for_foreign_key; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_foreign_key (
col1 integer NOT NULL,
col2 "char",
col3 bigint
);
ALTER TABLE target.table_for_foreign_key OWNER TO postgres;
--
-- TOC entry 12263 (class 1259 OID 149013)
-- Name: table_for_identical; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_identical OWNER TO postgres;
--
-- TOC entry 12261 (class 1259 OID 148986)
-- Name: table_for_index; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_index OWNER TO postgres;
--
-- TOC entry 12270 (class 1259 OID 149144)
-- Name: table_for_primary_key; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE target.table_for_primary_key OWNER TO postgres;
--
-- TOC entry 12265 (class 1259 OID 149034)
-- Name: table_for_rule; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_rule OWNER TO postgres;
--
-- TOC entry 12267 (class 1259 OID 149066)
-- Name: table_for_trigger; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_trigger OWNER TO postgres;
--
-- TOC entry 56906 (class 2606 OID 149097)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='15') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56906
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON CONSTRAINT "Exclusion" ON target.table_for_constraints IS 'comments';
--
-- TOC entry 56910 (class 2606 OID 149176)
-- Name: table_for_foreign_key table_for_foreign_key_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_foreign_key
ADD CONSTRAINT table_for_foreign_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148993)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56908 (class 2606 OID 149151)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56902 (class 2606 OID 149041)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56900 (class 2606 OID 149020)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 149073)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56893 (class 1259 OID 148994)
-- Name: index1; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index1 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56894 (class 1259 OID 148995)
-- Name: index2; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index2 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56898 (class 1259 OID 149021)
-- Name: index_identical; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index_identical ON target.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56895 (class 1259 OID 149212)
-- Name: index_same; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index_same ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56892 (class 1259 OID 139945)
-- Name: mview_index; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX mview_index ON target."MView" USING btree (text text_pattern_ops);
--
-- TOC entry 61045 (class 2618 OID 149042)
-- Name: table_for_rule rule1; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule1 AS
ON UPDATE TO target.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 61045
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON RULE rule1 ON target.table_for_rule IS 'comments';
--
-- TOC entry 61046 (class 2618 OID 149043)
-- Name: table_for_rule rule2; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule2 AS
ON UPDATE TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61047 (class 2618 OID 149044)
-- Name: table_for_rule rule3; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule3 AS
ON INSERT TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61050 (class 0 OID 139938)
-- Dependencies: 12250 61062
-- Name: MView; Type: MATERIALIZED VIEW DATA; Schema: target; Owner: postgres
--
REFRESH MATERIALIZED VIEW target."MView";
--
-- TOC entry 12284 (class 1259 OID 347823)
-- Name: test view; Type: VIEW; Schema: target; Owner: postgres
--
CREATE VIEW target."test view" AS
SELECT pg_class.relname,
pg_class.relnamespace,
pg_class.reltype,
pg_class.reloftype,
pg_class.relowner,
pg_class.relam,
pg_class.relfilenode,
pg_class.reltablespace,
pg_class.relpages,
pg_class.reltuples,
pg_class.relallvisible,
pg_class.reltoastrelid,
pg_class.relhasindex,
pg_class.relisshared,
pg_class.relpersistence,
pg_class.relkind,
pg_class.relnatts,
pg_class.relchecks,
pg_class.relhasoids,
pg_class.relhaspkey,
pg_class.relhasrules,
pg_class.relhastriggers,
pg_class.relhassubclass,
pg_class.relrowsecurity,
pg_class.relforcerowsecurity,
pg_class.relispopulated,
pg_class.relreplident,
pg_class.relispartition,
pg_class.relfrozenxid,
pg_class.relminmxid,
pg_class.relacl,
pg_class.reloptions,
pg_class.relpartbound
FROM pg_class
LIMIT 10;
ALTER TABLE target."test view" OWNER TO postgres;
--
-- TOC entry 12285 (class 1259 OID 347828)
-- Name: test view f; Type: VIEW; Schema: target; Owner: postgres
--
CREATE VIEW target."test view f" WITH (security_barrier='true') AS
SELECT 2;
ALTER TABLE target."test view f" OWNER TO postgres;
--
-- TOC entry 61105 (class 0 OID 0)
-- Dependencies: 12285
-- Name: VIEW "test view f"; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON VIEW target."test view f" IS 'cmn';

View File

@ -0,0 +1,439 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:54:15 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- TOC entry 17 (class 2615 OID 139770)
-- Name: source; Type: SCHEMA; Schema: -; Owner: postgres
--
CREATE SCHEMA source;
ALTER SCHEMA source OWNER TO postgres;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA source;
--
-- TOC entry 12272 (class 1259 OID 149205)
-- Name: table_for_partition; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_partition (
col1 bigint NOT NULL
)
PARTITION BY RANGE (col1);
ALTER TABLE source.table_for_partition OWNER TO postgres;
--
-- TOC entry 12273 (class 1259 OID 149208)
-- Name: part1; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.part1 (
col1 bigint NOT NULL
);
ALTER TABLE ONLY source.table_for_partition ATTACH PARTITION source.part1 FOR VALUES FROM ('1') TO ('23');
ALTER TABLE source.part1 OWNER TO postgres;
--
-- TOC entry 12274 (class 1259 OID 149213)
-- Name: table_for_partition_1; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_partition_1 (
col1 bigint
)
PARTITION BY RANGE (col1);
ALTER TABLE source.table_for_partition_1 OWNER TO postgres;
--
-- TOC entry 12275 (class 1259 OID 149216)
-- Name: part3; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.part3 (
col1 bigint
);
ALTER TABLE ONLY source.table_for_partition_1 ATTACH PARTITION source.part3 FOR VALUES FROM ('1') TO ('10');
ALTER TABLE source.part3 OWNER TO postgres;
--
-- TOC entry 12276 (class 1259 OID 149219)
-- Name: part4; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.part4 (
col1 bigint
);
ALTER TABLE ONLY source.table_for_partition_1 ATTACH PARTITION source.part4 FOR VALUES FROM ('11') TO ('20');
ALTER TABLE source.part4 OWNER TO postgres;
--
-- TOC entry 12258 (class 1259 OID 148963)
-- Name: table_for_column; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_column (
col1 bigint NOT NULL,
col2 text,
col3 text
);
ALTER TABLE source.table_for_column OWNER TO postgres;
--
-- TOC entry 12256 (class 1259 OID 148895)
-- Name: table_for_constraints; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_constraints (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_constraints OWNER TO postgres;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12256
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON TABLE source.table_for_constraints IS 'comments';
--
-- TOC entry 12262 (class 1259 OID 149004)
-- Name: table_for_identical; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_identical OWNER TO postgres;
--
-- TOC entry 12260 (class 1259 OID 148977)
-- Name: table_for_index; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_index OWNER TO postgres;
--
-- TOC entry 12269 (class 1259 OID 149128)
-- Name: table_for_primary_key; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE source.table_for_primary_key OWNER TO postgres;
--
-- TOC entry 12264 (class 1259 OID 149024)
-- Name: table_for_rule; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_rule OWNER TO postgres;
--
-- TOC entry 12266 (class 1259 OID 149048)
-- Name: table_for_trigger; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_trigger OWNER TO postgres;
--
-- TOC entry 56893 (class 2606 OID 148904)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='12') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 56893
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT "Exclusion" ON source.table_for_constraints IS 'comments';
--
-- TOC entry 56891 (class 2606 OID 148911)
-- Name: table_for_constraints check_con; Type: CHECK CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE source.table_for_constraints
ADD CONSTRAINT check_con CHECK ((col1 > 10)) NOT VALID;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56891
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT check_con ON source.table_for_constraints IS 'coment';
--
-- TOC entry 56899 (class 2606 OID 148970)
-- Name: table_for_column table_for_column_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_column
ADD CONSTRAINT table_for_column_pkey PRIMARY KEY (col1);
--
-- TOC entry 56895 (class 2606 OID 148902)
-- Name: table_for_constraints table_for_constraints_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT table_for_constraints_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 148984)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56913 (class 2606 OID 149135)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1, col2);
--
-- TOC entry 56909 (class 2606 OID 149031)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56907 (class 2606 OID 149011)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56911 (class 2606 OID 149055)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148913)
-- Name: table_for_constraints unique; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "unique" UNIQUE (col1);
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 56897
-- Name: CONSTRAINT "unique" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT "unique" ON source.table_for_constraints IS 'cmnt';
--
-- TOC entry 56900 (class 1259 OID 149023)
-- Name: index1; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index1 ON source.table_for_index USING btree (col2 varchar_pattern_ops);
--
-- TOC entry 56905 (class 1259 OID 149012)
-- Name: index_identical; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_identical ON source.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56901 (class 1259 OID 149211)
-- Name: index_same; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_same ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56902 (class 1259 OID 149022)
-- Name: index_source; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_source ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 61044 (class 2618 OID 149032)
-- Name: table_for_rule rule1; Type: RULE; Schema: source; Owner: postgres
--
CREATE RULE rule1 AS
ON UPDATE TO source.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61070 (class 0 OID 0)
-- Dependencies: 61044
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON RULE rule1 ON source.table_for_rule IS 'comments';
--
-- TOC entry 61045 (class 2618 OID 149033)
-- Name: table_for_rule rule2; Type: RULE; Schema: source; Owner: postgres
--
CREATE RULE rule2 AS
ON INSERT TO source.table_for_rule DO NOTHING;
--
-- TOC entry 12283 (class 1259 OID 347818)
-- Name: test view; Type: VIEW; Schema: source; Owner: postgres
--
CREATE VIEW source."test view" AS
SELECT pg_class.relname,
pg_class.relnamespace,
pg_class.reltype,
pg_class.reloftype,
pg_class.relowner,
pg_class.relam,
pg_class.relfilenode,
pg_class.reltablespace,
pg_class.relpages,
pg_class.reltuples,
pg_class.relallvisible,
pg_class.reltoastrelid,
pg_class.relhasindex,
pg_class.relisshared,
pg_class.relpersistence,
pg_class.relkind,
pg_class.relnatts,
pg_class.relchecks,
pg_class.relhasoids,
pg_class.relhasrules,
pg_class.relhastriggers,
pg_class.relhassubclass,
pg_class.relrowsecurity,
pg_class.relforcerowsecurity,
pg_class.relispopulated,
pg_class.relreplident,
pg_class.relispartition,
pg_class.relfrozenxid,
pg_class.relminmxid,
pg_class.relacl,
pg_class.reloptions,
pg_class.relpartbound
FROM pg_class
LIMIT 10;
ALTER TABLE source."test view" OWNER TO postgres;
--
-- TOC entry 12286 (class 1259 OID 347832)
-- Name: test view f; Type: VIEW; Schema: source; Owner: postgres
--
CREATE VIEW source."test view f" WITH (security_barrier='false') AS
SELECT 2;
ALTER TABLE source."test view f" OWNER TO postgres;
--
-- TOC entry 61111 (class 0 OID 0)
-- Dependencies: 12286
-- Name: VIEW "test view f"; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON VIEW source."test view f" IS 'cmn';

View File

@ -0,0 +1,428 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:55:22 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- TOC entry 18 (class 2615 OID 139771)
-- Name: target; Type: SCHEMA; Schema: -; Owner: postgres
--
CREATE SCHEMA target;
ALTER SCHEMA target OWNER TO postgres;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA target;
--
-- TOC entry 12250 (class 1259 OID 139938)
-- Name: MView; Type: MATERIALIZED VIEW; Schema: target; Owner: postgres
--
CREATE MATERIALIZED VIEW target."MView" AS
SELECT 'tekst'::text AS text
WITH NO DATA;
ALTER TABLE target."MView" OWNER TO postgres;
--
-- TOC entry 12277 (class 1259 OID 149234)
-- Name: table_for_partition_1; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_partition_1 (
col1 bigint
)
PARTITION BY RANGE (col1);
ALTER TABLE target.table_for_partition_1 OWNER TO postgres;
--
-- TOC entry 12278 (class 1259 OID 149237)
-- Name: part3; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.part3 (
col1 bigint
);
ALTER TABLE ONLY target.table_for_partition_1 ATTACH PARTITION target.part3 FOR VALUES FROM ('13') TO ('56');
ALTER TABLE target.part3 OWNER TO postgres;
--
-- TOC entry 12259 (class 1259 OID 148971)
-- Name: table_for_column; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_column (
col1 bigint,
col2 bigint,
col4 text
);
ALTER TABLE target.table_for_column OWNER TO postgres;
--
-- TOC entry 12268 (class 1259 OID 149089)
-- Name: table_for_constraints; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_constraints (
col1 integer NOT NULL,
col2 text,
CONSTRAINT check_con CHECK ((col1 > 30))
);
ALTER TABLE target.table_for_constraints OWNER TO postgres;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12268
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON TABLE target.table_for_constraints IS 'comments';
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 12268
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON CONSTRAINT check_con ON target.table_for_constraints IS 'coment';
--
-- TOC entry 12257 (class 1259 OID 148960)
-- Name: table_for_del; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_del (
);
ALTER TABLE target.table_for_del OWNER TO postgres;
--
-- TOC entry 12271 (class 1259 OID 149172)
-- Name: table_for_foreign_key; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_foreign_key (
col1 integer NOT NULL,
col2 "char",
col3 bigint
);
ALTER TABLE target.table_for_foreign_key OWNER TO postgres;
--
-- TOC entry 12263 (class 1259 OID 149013)
-- Name: table_for_identical; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_identical OWNER TO postgres;
--
-- TOC entry 12261 (class 1259 OID 148986)
-- Name: table_for_index; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_index OWNER TO postgres;
--
-- TOC entry 12270 (class 1259 OID 149144)
-- Name: table_for_primary_key; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE target.table_for_primary_key OWNER TO postgres;
--
-- TOC entry 12265 (class 1259 OID 149034)
-- Name: table_for_rule; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_rule OWNER TO postgres;
--
-- TOC entry 12267 (class 1259 OID 149066)
-- Name: table_for_trigger; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_trigger OWNER TO postgres;
--
-- TOC entry 56906 (class 2606 OID 149097)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='15') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56906
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON CONSTRAINT "Exclusion" ON target.table_for_constraints IS 'comments';
--
-- TOC entry 56910 (class 2606 OID 149176)
-- Name: table_for_foreign_key table_for_foreign_key_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_foreign_key
ADD CONSTRAINT table_for_foreign_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148993)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56908 (class 2606 OID 149151)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56902 (class 2606 OID 149041)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56900 (class 2606 OID 149020)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 149073)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56893 (class 1259 OID 148994)
-- Name: index1; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index1 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56894 (class 1259 OID 148995)
-- Name: index2; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index2 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56898 (class 1259 OID 149021)
-- Name: index_identical; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index_identical ON target.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56895 (class 1259 OID 149212)
-- Name: index_same; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index_same ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56892 (class 1259 OID 139945)
-- Name: mview_index; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX mview_index ON target."MView" USING btree (text text_pattern_ops);
--
-- TOC entry 61045 (class 2618 OID 149042)
-- Name: table_for_rule rule1; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule1 AS
ON UPDATE TO target.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 61045
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON RULE rule1 ON target.table_for_rule IS 'comments';
--
-- TOC entry 61046 (class 2618 OID 149043)
-- Name: table_for_rule rule2; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule2 AS
ON UPDATE TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61047 (class 2618 OID 149044)
-- Name: table_for_rule rule3; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule3 AS
ON INSERT TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61050 (class 0 OID 139938)
-- Dependencies: 12250 61062
-- Name: MView; Type: MATERIALIZED VIEW DATA; Schema: target; Owner: postgres
--
REFRESH MATERIALIZED VIEW target."MView";
--
-- TOC entry 12284 (class 1259 OID 347823)
-- Name: test view; Type: VIEW; Schema: target; Owner: postgres
--
CREATE VIEW target."test view" AS
SELECT pg_class.relname,
pg_class.relnamespace,
pg_class.reltype,
pg_class.reloftype,
pg_class.relowner,
pg_class.relam,
pg_class.relfilenode,
pg_class.reltablespace,
pg_class.relpages,
pg_class.reltuples,
pg_class.relallvisible,
pg_class.reltoastrelid,
pg_class.relhasindex,
pg_class.relisshared,
pg_class.relpersistence,
pg_class.relkind,
pg_class.relnatts,
pg_class.relchecks,
pg_class.relhasoids,
pg_class.relhasrules,
pg_class.relhastriggers,
pg_class.relhassubclass,
pg_class.relrowsecurity,
pg_class.relforcerowsecurity,
pg_class.relispopulated,
pg_class.relreplident,
pg_class.relispartition,
pg_class.relfrozenxid,
pg_class.relminmxid,
pg_class.relacl,
pg_class.reloptions,
pg_class.relpartbound
FROM pg_class
LIMIT 10;
ALTER TABLE target."test view" OWNER TO postgres;
--
-- TOC entry 12285 (class 1259 OID 347828)
-- Name: test view f; Type: VIEW; Schema: target; Owner: postgres
--
CREATE VIEW target."test view f" WITH (security_barrier='true') AS
SELECT 2;
ALTER TABLE target."test view f" OWNER TO postgres;
--
-- TOC entry 61105 (class 0 OID 0)
-- Dependencies: 12285
-- Name: VIEW "test view f"; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON VIEW target."test view f" IS 'cmn';

View File

@ -0,0 +1,440 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:54:15 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- TOC entry 17 (class 2615 OID 139770)
-- Name: source; Type: SCHEMA; Schema: -; Owner: postgres
--
CREATE SCHEMA source;
ALTER SCHEMA source OWNER TO postgres;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA source;
--
-- TOC entry 12272 (class 1259 OID 149205)
-- Name: table_for_partition; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_partition (
col1 bigint NOT NULL
)
PARTITION BY RANGE (col1);
ALTER TABLE source.table_for_partition OWNER TO postgres;
--
-- TOC entry 12273 (class 1259 OID 149208)
-- Name: part1; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.part1 (
col1 bigint NOT NULL
);
ALTER TABLE ONLY source.table_for_partition ATTACH PARTITION source.part1 FOR VALUES FROM ('1') TO ('23');
ALTER TABLE source.part1 OWNER TO postgres;
--
-- TOC entry 12274 (class 1259 OID 149213)
-- Name: table_for_partition_1; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_partition_1 (
col1 bigint
)
PARTITION BY RANGE (col1);
ALTER TABLE source.table_for_partition_1 OWNER TO postgres;
--
-- TOC entry 12275 (class 1259 OID 149216)
-- Name: part3; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.part3 (
col1 bigint
);
ALTER TABLE ONLY source.table_for_partition_1 ATTACH PARTITION source.part3 FOR VALUES FROM ('1') TO ('10');
ALTER TABLE source.part3 OWNER TO postgres;
--
-- TOC entry 12276 (class 1259 OID 149219)
-- Name: part4; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.part4 (
col1 bigint
);
ALTER TABLE ONLY source.table_for_partition_1 ATTACH PARTITION source.part4 FOR VALUES FROM ('11') TO ('20');
ALTER TABLE source.part4 OWNER TO postgres;
--
-- TOC entry 12258 (class 1259 OID 148963)
-- Name: table_for_column; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_column (
col1 bigint NOT NULL,
col2 text,
col3 text
);
ALTER TABLE source.table_for_column OWNER TO postgres;
--
-- TOC entry 12256 (class 1259 OID 148895)
-- Name: table_for_constraints; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_constraints (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_constraints OWNER TO postgres;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12256
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON TABLE source.table_for_constraints IS 'comments';
--
-- TOC entry 12262 (class 1259 OID 149004)
-- Name: table_for_identical; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_identical OWNER TO postgres;
--
-- TOC entry 12260 (class 1259 OID 148977)
-- Name: table_for_index; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_index OWNER TO postgres;
--
-- TOC entry 12269 (class 1259 OID 149128)
-- Name: table_for_primary_key; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE source.table_for_primary_key OWNER TO postgres;
--
-- TOC entry 12264 (class 1259 OID 149024)
-- Name: table_for_rule; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_rule OWNER TO postgres;
--
-- TOC entry 12266 (class 1259 OID 149048)
-- Name: table_for_trigger; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_trigger OWNER TO postgres;
--
-- TOC entry 56893 (class 2606 OID 148904)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='12') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 56893
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT "Exclusion" ON source.table_for_constraints IS 'comments';
--
-- TOC entry 56891 (class 2606 OID 148911)
-- Name: table_for_constraints check_con; Type: CHECK CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE source.table_for_constraints
ADD CONSTRAINT check_con CHECK ((col1 > 10)) NOT VALID;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56891
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT check_con ON source.table_for_constraints IS 'coment';
--
-- TOC entry 56899 (class 2606 OID 148970)
-- Name: table_for_column table_for_column_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_column
ADD CONSTRAINT table_for_column_pkey PRIMARY KEY (col1);
--
-- TOC entry 56895 (class 2606 OID 148902)
-- Name: table_for_constraints table_for_constraints_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT table_for_constraints_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 148984)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56913 (class 2606 OID 149135)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1, col2);
--
-- TOC entry 56909 (class 2606 OID 149031)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56907 (class 2606 OID 149011)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56911 (class 2606 OID 149055)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148913)
-- Name: table_for_constraints unique; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "unique" UNIQUE (col1);
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 56897
-- Name: CONSTRAINT "unique" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT "unique" ON source.table_for_constraints IS 'cmnt';
--
-- TOC entry 56900 (class 1259 OID 149023)
-- Name: index1; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index1 ON source.table_for_index USING btree (col2 varchar_pattern_ops);
--
-- TOC entry 56905 (class 1259 OID 149012)
-- Name: index_identical; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_identical ON source.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56901 (class 1259 OID 149211)
-- Name: index_same; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_same ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56902 (class 1259 OID 149022)
-- Name: index_source; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_source ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 61044 (class 2618 OID 149032)
-- Name: table_for_rule rule1; Type: RULE; Schema: source; Owner: postgres
--
CREATE RULE rule1 AS
ON UPDATE TO source.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61070 (class 0 OID 0)
-- Dependencies: 61044
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON RULE rule1 ON source.table_for_rule IS 'comments';
--
-- TOC entry 61045 (class 2618 OID 149033)
-- Name: table_for_rule rule2; Type: RULE; Schema: source; Owner: postgres
--
CREATE RULE rule2 AS
ON INSERT TO source.table_for_rule DO NOTHING;
--
-- TOC entry 12283 (class 1259 OID 347818)
-- Name: test view; Type: VIEW; Schema: source; Owner: postgres
--
CREATE VIEW source."test view" AS
SELECT pg_class.relname,
pg_class.relnamespace,
pg_class.reltype,
pg_class.reloftype,
pg_class.relowner,
pg_class.relam,
pg_class.relfilenode,
pg_class.reltablespace,
pg_class.relpages
FROM pg_class
LIMIT 10;
ALTER TABLE source."test view" OWNER TO postgres;
--
-- TOC entry 12286 (class 1259 OID 347832)
-- Name: test view f; Type: VIEW; Schema: source; Owner: postgres
--
CREATE VIEW source."test view f" WITH (security_barrier='false') AS
SELECT 2;
ALTER TABLE source."test view f" OWNER TO postgres;
--
-- TOC entry 61111 (class 0 OID 0)
-- Dependencies: 12286
-- Name: VIEW "test view f"; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON VIEW source."test view f" IS 'cmn';
--
-- TOC entry 223 (class 1255 OID 67206)
-- Name: dodaj_klijenta(character varying, character varying, character varying, character varying, integer, character varying, character varying, character varying, boolean, boolean, character varying, character varying, character varying, character varying, numeric, character varying); Type: PROCEDURE; Schema: public; Owner: postgres
--
CREATE PROCEDURE source.dodaj_klijenta(v_naziv character varying, v_oib character varying, v_pdv_id character varying, v_adresa character varying, v_mjesto integer, v_drzava character varying, v_tip_p_sub character varying, v_vlasnik character varying, v_pdv boolean, v_fisk boolean, v_iban character varying, v_k_osoba character varying, v_email character varying, v_br_tel character varying, v_radna_god numeric, v_schema character varying)
LANGUAGE sql
AS $$select 1;$$;
ALTER PROCEDURE source.dodaj_klijenta(v_naziv character varying, v_oib character varying, v_pdv_id character varying, v_adresa character varying, v_mjesto integer, v_drzava character varying, v_tip_p_sub character varying, v_vlasnik character varying, v_pdv boolean, v_fisk boolean, v_iban character varying, v_k_osoba character varying, v_email character varying, v_br_tel character varying, v_radna_god numeric, v_schema character varying) OWNER TO postgres;
--
-- TOC entry 220 (class 1255 OID 67205)
-- Name: proc1(bigint); Type: PROCEDURE; Schema: source; Owner: postgres
--
CREATE PROCEDURE source.proc1(arg1 bigint)
LANGUAGE sql
AS $$select 1;$$;
ALTER PROCEDURE source.proc1(arg1 bigint) OWNER TO postgres;

View File

@ -0,0 +1,417 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:55:22 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- TOC entry 18 (class 2615 OID 139771)
-- Name: target; Type: SCHEMA; Schema: -; Owner: postgres
--
CREATE SCHEMA target;
ALTER SCHEMA target OWNER TO postgres;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA target;
--
-- TOC entry 12250 (class 1259 OID 139938)
-- Name: MView; Type: MATERIALIZED VIEW; Schema: target; Owner: postgres
--
CREATE MATERIALIZED VIEW target."MView" AS
SELECT 'tekst'::text AS text
WITH NO DATA;
ALTER TABLE target."MView" OWNER TO postgres;
--
-- TOC entry 12277 (class 1259 OID 149234)
-- Name: table_for_partition_1; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_partition_1 (
col1 bigint
)
PARTITION BY RANGE (col1);
ALTER TABLE target.table_for_partition_1 OWNER TO postgres;
--
-- TOC entry 12278 (class 1259 OID 149237)
-- Name: part3; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.part3 (
col1 bigint
);
ALTER TABLE ONLY target.table_for_partition_1 ATTACH PARTITION target.part3 FOR VALUES FROM ('13') TO ('56');
ALTER TABLE target.part3 OWNER TO postgres;
--
-- TOC entry 12259 (class 1259 OID 148971)
-- Name: table_for_column; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_column (
col1 bigint,
col2 bigint,
col4 text
);
ALTER TABLE target.table_for_column OWNER TO postgres;
--
-- TOC entry 12268 (class 1259 OID 149089)
-- Name: table_for_constraints; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_constraints (
col1 integer NOT NULL,
col2 text,
CONSTRAINT check_con CHECK ((col1 > 30))
);
ALTER TABLE target.table_for_constraints OWNER TO postgres;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12268
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON TABLE target.table_for_constraints IS 'comments';
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 12268
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON CONSTRAINT check_con ON target.table_for_constraints IS 'coment';
--
-- TOC entry 12257 (class 1259 OID 148960)
-- Name: table_for_del; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_del (
);
ALTER TABLE target.table_for_del OWNER TO postgres;
--
-- TOC entry 12271 (class 1259 OID 149172)
-- Name: table_for_foreign_key; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_foreign_key (
col1 integer NOT NULL,
col2 "char",
col3 bigint
);
ALTER TABLE target.table_for_foreign_key OWNER TO postgres;
--
-- TOC entry 12263 (class 1259 OID 149013)
-- Name: table_for_identical; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_identical OWNER TO postgres;
--
-- TOC entry 12261 (class 1259 OID 148986)
-- Name: table_for_index; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_index OWNER TO postgres;
--
-- TOC entry 12270 (class 1259 OID 149144)
-- Name: table_for_primary_key; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE target.table_for_primary_key OWNER TO postgres;
--
-- TOC entry 12265 (class 1259 OID 149034)
-- Name: table_for_rule; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_rule OWNER TO postgres;
--
-- TOC entry 12267 (class 1259 OID 149066)
-- Name: table_for_trigger; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_trigger OWNER TO postgres;
--
-- TOC entry 56906 (class 2606 OID 149097)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='15') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56906
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON CONSTRAINT "Exclusion" ON target.table_for_constraints IS 'comments';
--
-- TOC entry 56910 (class 2606 OID 149176)
-- Name: table_for_foreign_key table_for_foreign_key_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_foreign_key
ADD CONSTRAINT table_for_foreign_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148993)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56908 (class 2606 OID 149151)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56902 (class 2606 OID 149041)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56900 (class 2606 OID 149020)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 149073)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56893 (class 1259 OID 148994)
-- Name: index1; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index1 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56894 (class 1259 OID 148995)
-- Name: index2; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index2 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56898 (class 1259 OID 149021)
-- Name: index_identical; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index_identical ON target.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56895 (class 1259 OID 149212)
-- Name: index_same; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index_same ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56892 (class 1259 OID 139945)
-- Name: mview_index; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX mview_index ON target."MView" USING btree (text text_pattern_ops);
--
-- TOC entry 61045 (class 2618 OID 149042)
-- Name: table_for_rule rule1; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule1 AS
ON UPDATE TO target.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 61045
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON RULE rule1 ON target.table_for_rule IS 'comments';
--
-- TOC entry 61046 (class 2618 OID 149043)
-- Name: table_for_rule rule2; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule2 AS
ON UPDATE TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61047 (class 2618 OID 149044)
-- Name: table_for_rule rule3; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule3 AS
ON INSERT TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61050 (class 0 OID 139938)
-- Dependencies: 12250 61062
-- Name: MView; Type: MATERIALIZED VIEW DATA; Schema: target; Owner: postgres
--
REFRESH MATERIALIZED VIEW target."MView";
--
-- TOC entry 12284 (class 1259 OID 347823)
-- Name: test view; Type: VIEW; Schema: target; Owner: postgres
--
CREATE VIEW target."test view" AS
SELECT pg_class.relname,
pg_class.relnamespace,
pg_class.reltype,
pg_class.reloftype,
pg_class.relowner,
pg_class.relam,
pg_class.relfilenode,
pg_class.reltablespace,
pg_class.relpages
FROM pg_class
LIMIT 10;
ALTER TABLE target."test view" OWNER TO postgres;
--
-- TOC entry 12285 (class 1259 OID 347828)
-- Name: test view f; Type: VIEW; Schema: target; Owner: postgres
--
CREATE VIEW target."test view f" WITH (security_barrier='true') AS
SELECT 2;
ALTER TABLE target."test view f" OWNER TO postgres;
--
-- TOC entry 61105 (class 0 OID 0)
-- Dependencies: 12285
-- Name: VIEW "test view f"; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON VIEW target."test view f" IS 'cmn';
--
-- TOC entry 437 (class 1255 OID 112907)
-- Name: dodaj_klijenta(character varying, character varying, character varying, character varying, integer, character varying, character varying, character varying, boolean, boolean, character varying, character varying, character varying, character varying, numeric, character varying); Type: PROCEDURE; Schema: target; Owner: postgres
--
CREATE PROCEDURE target.dodaj_klijenta(v_naziv character varying, v_oib character varying, v_pdv_id character varying, v_adresa character varying, v_mjesto integer, v_drzava character varying, v_tip_p_sub character varying, v_vlasnik character varying, v_pdv boolean, v_fisk boolean, v_iban character varying, v_k_osoba character varying, v_email character varying, v_br_tel character varying, v_radna_god numeric, v_schema character varying)
LANGUAGE sql
AS $$select 4;$$;
ALTER PROCEDURE target.dodaj_klijenta(v_naziv character varying, v_oib character varying, v_pdv_id character varying, v_adresa character varying, v_mjesto integer, v_drzava character varying, v_tip_p_sub character varying, v_vlasnik character varying, v_pdv boolean, v_fisk boolean, v_iban character varying, v_k_osoba character varying, v_email character varying, v_br_tel character varying, v_radna_god numeric, v_schema character varying) OWNER TO postgres;


@@ -0,0 +1,311 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:54:15 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
--
-- TOC entry 17 (class 2615 OID 139770)
-- Name: source; Type: SCHEMA; Schema: -; Owner: postgres
--
CREATE SCHEMA source;
ALTER SCHEMA source OWNER TO postgres;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA source;
--
-- TOC entry 12258 (class 1259 OID 148963)
-- Name: table_for_column; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_column (
col1 bigint NOT NULL,
col2 text,
col3 text
);
ALTER TABLE source.table_for_column OWNER TO postgres;
--
-- TOC entry 12256 (class 1259 OID 148895)
-- Name: table_for_constraints; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_constraints (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_constraints OWNER TO postgres;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12256
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON TABLE source.table_for_constraints IS 'comments';
--
-- TOC entry 12262 (class 1259 OID 149004)
-- Name: table_for_identical; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_identical OWNER TO postgres;
--
-- TOC entry 12260 (class 1259 OID 148977)
-- Name: table_for_index; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_index OWNER TO postgres;
--
-- TOC entry 12269 (class 1259 OID 149128)
-- Name: table_for_primary_key; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE source.table_for_primary_key OWNER TO postgres;
--
-- TOC entry 12264 (class 1259 OID 149024)
-- Name: table_for_rule; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_rule OWNER TO postgres;
--
-- TOC entry 12266 (class 1259 OID 149048)
-- Name: table_for_trigger; Type: TABLE; Schema: source; Owner: postgres
--
CREATE TABLE source.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_trigger OWNER TO postgres;
--
-- TOC entry 56893 (class 2606 OID 148904)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='12') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 56893
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT "Exclusion" ON source.table_for_constraints IS 'comments';
--
-- TOC entry 56891 (class 2606 OID 148911)
-- Name: table_for_constraints check_con; Type: CHECK CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE source.table_for_constraints
ADD CONSTRAINT check_con CHECK ((col1 > 10)) NOT VALID;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56891
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT check_con ON source.table_for_constraints IS 'coment';
--
-- TOC entry 56899 (class 2606 OID 148970)
-- Name: table_for_column table_for_column_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_column
ADD CONSTRAINT table_for_column_pkey PRIMARY KEY (col1);
--
-- TOC entry 56895 (class 2606 OID 148902)
-- Name: table_for_constraints table_for_constraints_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT table_for_constraints_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 148984)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56913 (class 2606 OID 149135)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1, col2);
--
-- TOC entry 56909 (class 2606 OID 149031)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56907 (class 2606 OID 149011)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56911 (class 2606 OID 149055)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148913)
-- Name: table_for_constraints unique; Type: CONSTRAINT; Schema: source; Owner: postgres
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "unique" UNIQUE (col1);
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 56897
-- Name: CONSTRAINT "unique" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON CONSTRAINT "unique" ON source.table_for_constraints IS 'cmnt';
--
-- TOC entry 56900 (class 1259 OID 149023)
-- Name: index1; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index1 ON source.table_for_index USING btree (col2 varchar_pattern_ops);
--
-- TOC entry 56905 (class 1259 OID 149012)
-- Name: index_identical; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_identical ON source.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56901 (class 1259 OID 149211)
-- Name: index_same; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_same ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56902 (class 1259 OID 149022)
-- Name: index_source; Type: INDEX; Schema: source; Owner: postgres
--
CREATE INDEX index_source ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 61044 (class 2618 OID 149032)
-- Name: table_for_rule rule1; Type: RULE; Schema: source; Owner: postgres
--
CREATE RULE rule1 AS
ON UPDATE TO source.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61070 (class 0 OID 0)
-- Dependencies: 61044
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: source; Owner: postgres
--
COMMENT ON RULE rule1 ON source.table_for_rule IS 'comments';
--
-- TOC entry 61045 (class 2618 OID 149033)
-- Name: table_for_rule rule2; Type: RULE; Schema: source; Owner: postgres
--
CREATE RULE rule2 AS
ON INSERT TO source.table_for_rule DO NOTHING;


@@ -0,0 +1,337 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:55:22 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
--
-- TOC entry 18 (class 2615 OID 139771)
-- Name: target; Type: SCHEMA; Schema: -; Owner: postgres
--
CREATE SCHEMA target;
ALTER SCHEMA target OWNER TO postgres;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA target;
--
-- TOC entry 12250 (class 1259 OID 139938)
-- Name: MView; Type: MATERIALIZED VIEW; Schema: target; Owner: postgres
--
CREATE MATERIALIZED VIEW target."MView" AS
SELECT 'tekst'::text AS text
WITH NO DATA;
ALTER TABLE target."MView" OWNER TO postgres;
--
-- TOC entry 12259 (class 1259 OID 148971)
-- Name: table_for_column; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_column (
col1 bigint,
col2 bigint,
col4 text
);
ALTER TABLE target.table_for_column OWNER TO postgres;
--
-- TOC entry 12268 (class 1259 OID 149089)
-- Name: table_for_constraints; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_constraints (
col1 integer NOT NULL,
col2 text,
CONSTRAINT check_con CHECK ((col1 > 30))
);
ALTER TABLE target.table_for_constraints OWNER TO postgres;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12268
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON TABLE target.table_for_constraints IS 'comments';
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 12268
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON CONSTRAINT check_con ON target.table_for_constraints IS 'coment';
--
-- TOC entry 12257 (class 1259 OID 148960)
-- Name: table_for_del; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_del (
);
ALTER TABLE target.table_for_del OWNER TO postgres;
--
-- TOC entry 12271 (class 1259 OID 149172)
-- Name: table_for_foreign_key; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_foreign_key (
col1 integer NOT NULL,
col2 "char",
col3 bigint
);
ALTER TABLE target.table_for_foreign_key OWNER TO postgres;
--
-- TOC entry 12263 (class 1259 OID 149013)
-- Name: table_for_identical; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_identical OWNER TO postgres;
--
-- TOC entry 12261 (class 1259 OID 148986)
-- Name: table_for_index; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_index OWNER TO postgres;
--
-- TOC entry 12270 (class 1259 OID 149144)
-- Name: table_for_primary_key; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE target.table_for_primary_key OWNER TO postgres;
--
-- TOC entry 12265 (class 1259 OID 149034)
-- Name: table_for_rule; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_rule OWNER TO postgres;
--
-- TOC entry 12267 (class 1259 OID 149066)
-- Name: table_for_trigger; Type: TABLE; Schema: target; Owner: postgres
--
CREATE TABLE target.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_trigger OWNER TO postgres;
--
-- TOC entry 56906 (class 2606 OID 149097)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='15') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56906
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON CONSTRAINT "Exclusion" ON target.table_for_constraints IS 'comments';
--
-- TOC entry 56910 (class 2606 OID 149176)
-- Name: table_for_foreign_key table_for_foreign_key_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_foreign_key
ADD CONSTRAINT table_for_foreign_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148993)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56908 (class 2606 OID 149151)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56902 (class 2606 OID 149041)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56900 (class 2606 OID 149020)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 149073)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: target; Owner: postgres
--
ALTER TABLE ONLY target.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56893 (class 1259 OID 148994)
-- Name: index1; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index1 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56894 (class 1259 OID 148995)
-- Name: index2; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index2 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56898 (class 1259 OID 149021)
-- Name: index_identical; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index_identical ON target.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56895 (class 1259 OID 149212)
-- Name: index_same; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX index_same ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56892 (class 1259 OID 139945)
-- Name: mview_index; Type: INDEX; Schema: target; Owner: postgres
--
CREATE INDEX mview_index ON target."MView" USING btree (text text_pattern_ops);
--
-- TOC entry 61045 (class 2618 OID 149042)
-- Name: table_for_rule rule1; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule1 AS
ON UPDATE TO target.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 61045
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: target; Owner: postgres
--
COMMENT ON RULE rule1 ON target.table_for_rule IS 'comments';
--
-- TOC entry 61046 (class 2618 OID 149043)
-- Name: table_for_rule rule2; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule2 AS
ON UPDATE TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61047 (class 2618 OID 149044)
-- Name: table_for_rule rule3; Type: RULE; Schema: target; Owner: postgres
--
CREATE RULE rule3 AS
ON INSERT TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61050 (class 0 OID 139938)
-- Dependencies: 12250 61062
-- Name: MView; Type: MATERIALIZED VIEW DATA; Schema: target; Owner: postgres
--
REFRESH MATERIALIZED VIEW target."MView";

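The source.sql / target.sql pair above is deliberately divergent test data for the comparison: for example, check_con is (col1 > 10) NOT VALID in source but (col1 > 30) in target, the "Exclusion" constraint uses fillfactor 12 versus 15, index1 is built with varchar_pattern_ops versus text_pattern_ops, and the rule definitions differ. Purely as an illustration of the kind of difference script the comparison is expected to yield (a hedged sketch, not the tool's actual output), a script applied to the target schema might contain statements such as:

-- hypothetical excerpt of a source-to-target difference script
ALTER TABLE target.table_for_constraints DROP CONSTRAINT check_con;
ALTER TABLE target.table_for_constraints
    ADD CONSTRAINT check_con CHECK ((col1 > 10)) NOT VALID;
DROP INDEX target.index1;
CREATE INDEX index1 ON target.table_for_index USING btree (col2 varchar_pattern_ops);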

@@ -0,0 +1,376 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:54:15 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- TOC entry 17 (class 2615 OID 139770)
-- Name: source; Type: SCHEMA; Schema: -; Owner: enterprisedb
--
CREATE SCHEMA source;
ALTER SCHEMA source OWNER TO enterprisedb;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA source;
--
-- TOC entry 12272 (class 1259 OID 149205)
-- Name: table_for_partition; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_partition (
col1 bigint NOT NULL
)
PARTITION BY RANGE (col1);
ALTER TABLE source.table_for_partition OWNER TO enterprisedb;
--
-- TOC entry 12273 (class 1259 OID 149208)
-- Name: part1; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.part1 (
col1 bigint NOT NULL
);
ALTER TABLE ONLY source.table_for_partition ATTACH PARTITION source.part1 FOR VALUES FROM ('1') TO ('23');
ALTER TABLE source.part1 OWNER TO enterprisedb;
--
-- TOC entry 12274 (class 1259 OID 149213)
-- Name: table_for_partition_1; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_partition_1 (
col1 bigint
)
PARTITION BY RANGE (col1);
ALTER TABLE source.table_for_partition_1 OWNER TO enterprisedb;
--
-- TOC entry 12275 (class 1259 OID 149216)
-- Name: part3; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.part3 (
col1 bigint
);
ALTER TABLE ONLY source.table_for_partition_1 ATTACH PARTITION source.part3 FOR VALUES FROM ('1') TO ('10');
ALTER TABLE source.part3 OWNER TO enterprisedb;
--
-- TOC entry 12276 (class 1259 OID 149219)
-- Name: part4; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.part4 (
col1 bigint
);
ALTER TABLE ONLY source.table_for_partition_1 ATTACH PARTITION source.part4 FOR VALUES FROM ('11') TO ('20');
ALTER TABLE source.part4 OWNER TO enterprisedb;
--
-- TOC entry 12258 (class 1259 OID 148963)
-- Name: table_for_column; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_column (
col1 bigint NOT NULL,
col2 text,
col3 text
);
ALTER TABLE source.table_for_column OWNER TO enterprisedb;
--
-- TOC entry 12256 (class 1259 OID 148895)
-- Name: table_for_constraints; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_constraints (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_constraints OWNER TO enterprisedb;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12256
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON TABLE source.table_for_constraints IS 'comments';
--
-- TOC entry 12262 (class 1259 OID 149004)
-- Name: table_for_identical; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_identical OWNER TO enterprisedb;
--
-- TOC entry 12260 (class 1259 OID 148977)
-- Name: table_for_index; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_index OWNER TO enterprisedb;
--
-- TOC entry 12269 (class 1259 OID 149128)
-- Name: table_for_primary_key; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE source.table_for_primary_key OWNER TO enterprisedb;
--
-- TOC entry 12264 (class 1259 OID 149024)
-- Name: table_for_rule; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_rule OWNER TO enterprisedb;
--
-- TOC entry 12266 (class 1259 OID 149048)
-- Name: table_for_trigger; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_trigger OWNER TO enterprisedb;
--
-- TOC entry 56893 (class 2606 OID 148904)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='12') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 56893
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON CONSTRAINT "Exclusion" ON source.table_for_constraints IS 'comments';
--
-- TOC entry 56891 (class 2606 OID 148911)
-- Name: table_for_constraints check_con; Type: CHECK CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE source.table_for_constraints
ADD CONSTRAINT check_con CHECK ((col1 > 10)) NOT VALID;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56891
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON CONSTRAINT check_con ON source.table_for_constraints IS 'coment';
--
-- TOC entry 56899 (class 2606 OID 148970)
-- Name: table_for_column table_for_column_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_column
ADD CONSTRAINT table_for_column_pkey PRIMARY KEY (col1);
--
-- TOC entry 56895 (class 2606 OID 148902)
-- Name: table_for_constraints table_for_constraints_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT table_for_constraints_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 148984)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56913 (class 2606 OID 149135)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1, col2);
--
-- TOC entry 56909 (class 2606 OID 149031)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56907 (class 2606 OID 149011)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56911 (class 2606 OID 149055)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148913)
-- Name: table_for_constraints unique; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "unique" UNIQUE (col1);
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 56897
-- Name: CONSTRAINT "unique" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON CONSTRAINT "unique" ON source.table_for_constraints IS 'cmnt';
--
-- TOC entry 56900 (class 1259 OID 149023)
-- Name: index1; Type: INDEX; Schema: source; Owner: enterprisedb
--
CREATE INDEX index1 ON source.table_for_index USING btree (col2 varchar_pattern_ops);
--
-- TOC entry 56905 (class 1259 OID 149012)
-- Name: index_identical; Type: INDEX; Schema: source; Owner: enterprisedb
--
CREATE INDEX index_identical ON source.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56901 (class 1259 OID 149211)
-- Name: index_same; Type: INDEX; Schema: source; Owner: enterprisedb
--
CREATE INDEX index_same ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56902 (class 1259 OID 149022)
-- Name: index_source; Type: INDEX; Schema: source; Owner: enterprisedb
--
CREATE INDEX index_source ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 61044 (class 2618 OID 149032)
-- Name: table_for_rule rule1; Type: RULE; Schema: source; Owner: enterprisedb
--
CREATE RULE rule1 AS
ON UPDATE TO source.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61070 (class 0 OID 0)
-- Dependencies: 61044
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON RULE rule1 ON source.table_for_rule IS 'comments';
--
-- TOC entry 61045 (class 2618 OID 149033)
-- Name: table_for_rule rule2; Type: RULE; Schema: source; Owner: enterprisedb
--
CREATE RULE rule2 AS
ON INSERT TO source.table_for_rule DO NOTHING;


@@ -0,0 +1,364 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:55:22 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- TOC entry 18 (class 2615 OID 139771)
-- Name: target; Type: SCHEMA; Schema: -; Owner: enterprisedb
--
CREATE SCHEMA target;
ALTER SCHEMA target OWNER TO enterprisedb;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA target;
--
-- TOC entry 12250 (class 1259 OID 139938)
-- Name: MView; Type: MATERIALIZED VIEW; Schema: target; Owner: enterprisedb
--
CREATE MATERIALIZED VIEW target."MView" AS
SELECT 'tekst'::text AS text
WITH NO DATA;
ALTER TABLE target."MView" OWNER TO enterprisedb;
--
-- TOC entry 12277 (class 1259 OID 149234)
-- Name: table_for_partition_1; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_partition_1 (
col1 bigint
)
PARTITION BY RANGE (col1);
ALTER TABLE target.table_for_partition_1 OWNER TO enterprisedb;
--
-- TOC entry 12278 (class 1259 OID 149237)
-- Name: part3; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.part3 (
col1 bigint
);
ALTER TABLE ONLY target.table_for_partition_1 ATTACH PARTITION target.part3 FOR VALUES FROM ('13') TO ('56');
ALTER TABLE target.part3 OWNER TO enterprisedb;
--
-- TOC entry 12259 (class 1259 OID 148971)
-- Name: table_for_column; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_column (
col1 bigint,
col2 bigint,
col4 text
);
ALTER TABLE target.table_for_column OWNER TO enterprisedb;
--
-- TOC entry 12268 (class 1259 OID 149089)
-- Name: table_for_constraints; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_constraints (
col1 integer NOT NULL,
col2 text,
CONSTRAINT check_con CHECK ((col1 > 30))
);
ALTER TABLE target.table_for_constraints OWNER TO enterprisedb;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12268
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: target; Owner: enterprisedb
--
COMMENT ON TABLE target.table_for_constraints IS 'comments';
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 12268
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: target; Owner: enterprisedb
--
COMMENT ON CONSTRAINT check_con ON target.table_for_constraints IS 'coment';
--
-- TOC entry 12257 (class 1259 OID 148960)
-- Name: table_for_del; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_del (
);
ALTER TABLE target.table_for_del OWNER TO enterprisedb;
--
-- TOC entry 12271 (class 1259 OID 149172)
-- Name: table_for_foreign_key; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_foreign_key (
col1 integer NOT NULL,
col2 "char",
col3 bigint
);
ALTER TABLE target.table_for_foreign_key OWNER TO enterprisedb;
--
-- TOC entry 12263 (class 1259 OID 149013)
-- Name: table_for_identical; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_identical OWNER TO enterprisedb;
--
-- TOC entry 12261 (class 1259 OID 148986)
-- Name: table_for_index; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_index OWNER TO enterprisedb;
--
-- TOC entry 12270 (class 1259 OID 149144)
-- Name: table_for_primary_key; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE target.table_for_primary_key OWNER TO enterprisedb;
--
-- TOC entry 12265 (class 1259 OID 149034)
-- Name: table_for_rule; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_rule OWNER TO enterprisedb;
--
-- TOC entry 12267 (class 1259 OID 149066)
-- Name: table_for_trigger; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_trigger OWNER TO enterprisedb;
--
-- TOC entry 56906 (class 2606 OID 149097)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='15') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56906
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: target; Owner: enterprisedb
--
COMMENT ON CONSTRAINT "Exclusion" ON target.table_for_constraints IS 'comments';
--
-- TOC entry 56910 (class 2606 OID 149176)
-- Name: table_for_foreign_key table_for_foreign_key_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_foreign_key
ADD CONSTRAINT table_for_foreign_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148993)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56908 (class 2606 OID 149151)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56902 (class 2606 OID 149041)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56900 (class 2606 OID 149020)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 149073)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56893 (class 1259 OID 148994)
-- Name: index1; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX index1 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56894 (class 1259 OID 148995)
-- Name: index2; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX index2 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56898 (class 1259 OID 149021)
-- Name: index_identical; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX index_identical ON target.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56895 (class 1259 OID 149212)
-- Name: index_same; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX index_same ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56892 (class 1259 OID 139945)
-- Name: mview_index; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX mview_index ON target."MView" USING btree (text text_pattern_ops);
--
-- TOC entry 61045 (class 2618 OID 149042)
-- Name: table_for_rule rule1; Type: RULE; Schema: target; Owner: enterprisedb
--
CREATE RULE rule1 AS
ON UPDATE TO target.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 61045
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: target; Owner: enterprisedb
--
COMMENT ON RULE rule1 ON target.table_for_rule IS 'comments';
--
-- TOC entry 61046 (class 2618 OID 149043)
-- Name: table_for_rule rule2; Type: RULE; Schema: target; Owner: enterprisedb
--
CREATE RULE rule2 AS
ON UPDATE TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61047 (class 2618 OID 149044)
-- Name: table_for_rule rule3; Type: RULE; Schema: target; Owner: enterprisedb
--
CREATE RULE rule3 AS
ON INSERT TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61050 (class 0 OID 139938)
-- Dependencies: 12250 61062
-- Name: MView; Type: MATERIALIZED VIEW DATA; Schema: target; Owner: enterprisedb
--
REFRESH MATERIALIZED VIEW target."MView";


@@ -0,0 +1,311 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:54:15 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
--
-- TOC entry 17 (class 2615 OID 139770)
-- Name: source; Type: SCHEMA; Schema: -; Owner: enterprisedb
--
CREATE SCHEMA source;
ALTER SCHEMA source OWNER TO enterprisedb;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA source;
--
-- TOC entry 12258 (class 1259 OID 148963)
-- Name: table_for_column; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_column (
col1 bigint NOT NULL,
col2 text,
col3 text
);
ALTER TABLE source.table_for_column OWNER TO enterprisedb;
--
-- TOC entry 12256 (class 1259 OID 148895)
-- Name: table_for_constraints; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_constraints (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_constraints OWNER TO enterprisedb;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12256
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON TABLE source.table_for_constraints IS 'comments';
--
-- TOC entry 12262 (class 1259 OID 149004)
-- Name: table_for_identical; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_identical OWNER TO enterprisedb;
--
-- TOC entry 12260 (class 1259 OID 148977)
-- Name: table_for_index; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE source.table_for_index OWNER TO enterprisedb;
--
-- TOC entry 12269 (class 1259 OID 149128)
-- Name: table_for_primary_key; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE source.table_for_primary_key OWNER TO enterprisedb;
--
-- TOC entry 12264 (class 1259 OID 149024)
-- Name: table_for_rule; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_rule OWNER TO enterprisedb;
--
-- TOC entry 12266 (class 1259 OID 149048)
-- Name: table_for_trigger; Type: TABLE; Schema: source; Owner: enterprisedb
--
CREATE TABLE source.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE source.table_for_trigger OWNER TO enterprisedb;
--
-- TOC entry 56893 (class 2606 OID 148904)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='12') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 56893
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON CONSTRAINT "Exclusion" ON source.table_for_constraints IS 'comments';
--
-- TOC entry 56891 (class 2606 OID 148911)
-- Name: table_for_constraints check_con; Type: CHECK CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE source.table_for_constraints
ADD CONSTRAINT check_con CHECK ((col1 > 10)) NOT VALID;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56891
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON CONSTRAINT check_con ON source.table_for_constraints IS 'coment';
--
-- TOC entry 56899 (class 2606 OID 148970)
-- Name: table_for_column table_for_column_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_column
ADD CONSTRAINT table_for_column_pkey PRIMARY KEY (col1);
--
-- TOC entry 56895 (class 2606 OID 148902)
-- Name: table_for_constraints table_for_constraints_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT table_for_constraints_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 148984)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56913 (class 2606 OID 149135)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1, col2);
--
-- TOC entry 56909 (class 2606 OID 149031)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56907 (class 2606 OID 149011)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56911 (class 2606 OID 149055)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148913)
-- Name: table_for_constraints unique; Type: CONSTRAINT; Schema: source; Owner: enterprisedb
--
ALTER TABLE ONLY source.table_for_constraints
ADD CONSTRAINT "unique" UNIQUE (col1);
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 56897
-- Name: CONSTRAINT "unique" ON table_for_constraints; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON CONSTRAINT "unique" ON source.table_for_constraints IS 'cmnt';
--
-- TOC entry 56900 (class 1259 OID 149023)
-- Name: index1; Type: INDEX; Schema: source; Owner: enterprisedb
--
CREATE INDEX index1 ON source.table_for_index USING btree (col2 varchar_pattern_ops);
--
-- TOC entry 56905 (class 1259 OID 149012)
-- Name: index_identical; Type: INDEX; Schema: source; Owner: enterprisedb
--
CREATE INDEX index_identical ON source.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56901 (class 1259 OID 149211)
-- Name: index_same; Type: INDEX; Schema: source; Owner: enterprisedb
--
CREATE INDEX index_same ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56902 (class 1259 OID 149022)
-- Name: index_source; Type: INDEX; Schema: source; Owner: enterprisedb
--
CREATE INDEX index_source ON source.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 61044 (class 2618 OID 149032)
-- Name: table_for_rule rule1; Type: RULE; Schema: source; Owner: enterprisedb
--
CREATE RULE rule1 AS
ON UPDATE TO source.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61070 (class 0 OID 0)
-- Dependencies: 61044
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: source; Owner: enterprisedb
--
COMMENT ON RULE rule1 ON source.table_for_rule IS 'comments';
--
-- TOC entry 61045 (class 2618 OID 149033)
-- Name: table_for_rule rule2; Type: RULE; Schema: source; Owner: enterprisedb
--
CREATE RULE rule2 AS
ON INSERT TO source.table_for_rule DO NOTHING;


@@ -0,0 +1,337 @@
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 12beta2
-- Started on 2019-11-01 12:55:22 IST
SET statement_timeout = 0;
SET lock_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
--
-- TOC entry 18 (class 2615 OID 139771)
-- Name: target; Type: SCHEMA; Schema: -; Owner: enterprisedb
--
CREATE SCHEMA target;
ALTER SCHEMA target OWNER TO enterprisedb;
SET default_tablespace = '';
CREATE EXTENSION btree_gist
SCHEMA target;
--
-- TOC entry 12250 (class 1259 OID 139938)
-- Name: MView; Type: MATERIALIZED VIEW; Schema: target; Owner: enterprisedb
--
CREATE MATERIALIZED VIEW target."MView" AS
SELECT 'tekst'::text AS text
WITH NO DATA;
ALTER TABLE target."MView" OWNER TO enterprisedb;
--
-- TOC entry 12259 (class 1259 OID 148971)
-- Name: table_for_column; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_column (
col1 bigint,
col2 bigint,
col4 text
);
ALTER TABLE target.table_for_column OWNER TO enterprisedb;
--
-- TOC entry 12268 (class 1259 OID 149089)
-- Name: table_for_constraints; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_constraints (
col1 integer NOT NULL,
col2 text,
CONSTRAINT check_con CHECK ((col1 > 30))
);
ALTER TABLE target.table_for_constraints OWNER TO enterprisedb;
--
-- TOC entry 61066 (class 0 OID 0)
-- Dependencies: 12268
-- Name: TABLE table_for_constraints; Type: COMMENT; Schema: target; Owner: enterprisedb
--
COMMENT ON TABLE target.table_for_constraints IS 'comments';
--
-- TOC entry 61067 (class 0 OID 0)
-- Dependencies: 12268
-- Name: CONSTRAINT check_con ON table_for_constraints; Type: COMMENT; Schema: target; Owner: enterprisedb
--
COMMENT ON CONSTRAINT check_con ON target.table_for_constraints IS 'coment';
--
-- TOC entry 12257 (class 1259 OID 148960)
-- Name: table_for_del; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_del (
);
ALTER TABLE target.table_for_del OWNER TO enterprisedb;
--
-- TOC entry 12271 (class 1259 OID 149172)
-- Name: table_for_foreign_key; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_foreign_key (
col1 integer NOT NULL,
col2 "char",
col3 bigint
);
ALTER TABLE target.table_for_foreign_key OWNER TO enterprisedb;
--
-- TOC entry 12263 (class 1259 OID 149013)
-- Name: table_for_identical; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_identical (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_identical OWNER TO enterprisedb;
--
-- TOC entry 12261 (class 1259 OID 148986)
-- Name: table_for_index; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_index (
col1 integer NOT NULL,
col2 text
);
ALTER TABLE target.table_for_index OWNER TO enterprisedb;
--
-- TOC entry 12270 (class 1259 OID 149144)
-- Name: table_for_primary_key; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_primary_key (
col1 integer NOT NULL,
col2 text NOT NULL
);
ALTER TABLE target.table_for_primary_key OWNER TO enterprisedb;
--
-- TOC entry 12265 (class 1259 OID 149034)
-- Name: table_for_rule; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_rule (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_rule OWNER TO enterprisedb;
--
-- TOC entry 12267 (class 1259 OID 149066)
-- Name: table_for_trigger; Type: TABLE; Schema: target; Owner: enterprisedb
--
CREATE TABLE target.table_for_trigger (
col1 bigint NOT NULL,
col2 text
);
ALTER TABLE target.table_for_trigger OWNER TO enterprisedb;
--
-- TOC entry 56906 (class 2606 OID 149097)
-- Name: table_for_constraints Exclusion; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_constraints
ADD CONSTRAINT "Exclusion" EXCLUDE USING gist (col2 WITH <>) WITH (fillfactor='15') WHERE ((col1 > 1)) DEFERRABLE INITIALLY DEFERRED;
--
-- TOC entry 61068 (class 0 OID 0)
-- Dependencies: 56906
-- Name: CONSTRAINT "Exclusion" ON table_for_constraints; Type: COMMENT; Schema: target; Owner: enterprisedb
--
COMMENT ON CONSTRAINT "Exclusion" ON target.table_for_constraints IS 'comments';
--
-- TOC entry 56910 (class 2606 OID 149176)
-- Name: table_for_foreign_key table_for_foreign_key_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_foreign_key
ADD CONSTRAINT table_for_foreign_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56897 (class 2606 OID 148993)
-- Name: table_for_index table_for_index_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_index
ADD CONSTRAINT table_for_index_pkey PRIMARY KEY (col1);
--
-- TOC entry 56908 (class 2606 OID 149151)
-- Name: table_for_primary_key table_for_primary_key_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_primary_key
ADD CONSTRAINT table_for_primary_key_pkey PRIMARY KEY (col1);
--
-- TOC entry 56902 (class 2606 OID 149041)
-- Name: table_for_rule table_for_rule_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_rule
ADD CONSTRAINT table_for_rule_pkey PRIMARY KEY (col1);
--
-- TOC entry 56900 (class 2606 OID 149020)
-- Name: table_for_identical table_for_table_for_identical_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_identical
ADD CONSTRAINT table_for_table_for_identical_pkey PRIMARY KEY (col1);
--
-- TOC entry 56904 (class 2606 OID 149073)
-- Name: table_for_trigger table_for_trigger_pkey; Type: CONSTRAINT; Schema: target; Owner: enterprisedb
--
ALTER TABLE ONLY target.table_for_trigger
ADD CONSTRAINT table_for_trigger_pkey PRIMARY KEY (col1);
--
-- TOC entry 56893 (class 1259 OID 148994)
-- Name: index1; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX index1 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56894 (class 1259 OID 148995)
-- Name: index2; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX index2 ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56898 (class 1259 OID 149021)
-- Name: index_identical; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX index_identical ON target.table_for_identical USING btree (col2 text_pattern_ops);
--
-- TOC entry 56895 (class 1259 OID 149212)
-- Name: index_same; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX index_same ON target.table_for_index USING btree (col2 text_pattern_ops);
--
-- TOC entry 56892 (class 1259 OID 139945)
-- Name: mview_index; Type: INDEX; Schema: target; Owner: enterprisedb
--
CREATE INDEX mview_index ON target."MView" USING btree (text text_pattern_ops);
--
-- TOC entry 61045 (class 2618 OID 149042)
-- Name: table_for_rule rule1; Type: RULE; Schema: target; Owner: enterprisedb
--
CREATE RULE rule1 AS
ON UPDATE TO target.table_for_rule DO INSTEAD NOTHING;
--
-- TOC entry 61069 (class 0 OID 0)
-- Dependencies: 61045
-- Name: RULE rule1 ON table_for_rule; Type: COMMENT; Schema: target; Owner: enterprisedb
--
COMMENT ON RULE rule1 ON target.table_for_rule IS 'comments';
--
-- TOC entry 61046 (class 2618 OID 149043)
-- Name: table_for_rule rule2; Type: RULE; Schema: target; Owner: enterprisedb
--
CREATE RULE rule2 AS
ON UPDATE TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61047 (class 2618 OID 149044)
-- Name: table_for_rule rule3; Type: RULE; Schema: target; Owner: enterprisedb
--
CREATE RULE rule3 AS
ON INSERT TO target.table_for_rule DO NOTHING;
--
-- TOC entry 61050 (class 0 OID 139938)
-- Dependencies: 12250 61062
-- Name: MView; Type: MATERIALIZED VIEW DATA; Schema: target; Owner: enterprisedb
--
REFRESH MATERIALIZED VIEW target."MView";

View File

@ -0,0 +1,204 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import uuid
import json
import os
import random
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from .utils import create_table, create_schema, restore_schema
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.versioned_template_loader import \
get_version_mapping_directories
class SchemaDiffTestCase(BaseTestGenerator):
""" This class will test the schema diff. """
scenarios = [
# URL for comparing the source and target schemas.
('Schema diff comparison', dict(
url='schema_diff/compare/{0}/{1}/{2}/{3}/{4}/{5}/{6}'))
]
def setUp(self):
self.src_database = "db_schema_diff_src_%s" % str(uuid.uuid4())[1:8]
self.tar_database = "db_schema_diff_tar_%s" % str(uuid.uuid4())[1:8]
self.server = parent_node_dict["server"][-1]["server"]
self.server_id = parent_node_dict["server"][-1]["server_id"]
self.src_db_id = utils.create_database(self.server, self.src_database)
self.tar_db_id = utils.create_database(self.server, self.tar_database)
self.nodes = ['table', 'function', 'procedure', 'view', 'mview']
self.restore_backup()
def restore_backup(self):
self.sql_folder = self.get_sql_folder()
if self.sql_folder is None:
raise FileNotFoundError('Schema diff folder does not exist')
src_sql_path = os.path.join(self.sql_folder, 'source.sql')
tar_sql_path = os.path.join(self.sql_folder, 'target.sql')
if not os.path.exists(src_sql_path):
raise FileNotFoundError(
'{} file does not exist'.format(src_sql_path))
if not os.path.exists(tar_sql_path):
raise FileNotFoundError(
'{} file does not exist'.format(tar_sql_path))
self.src_schema_id = restore_schema(self.server, self.src_database,
'source', src_sql_path)
self.tar_schema_id = restore_schema(self.server, self.tar_database,
'target', tar_sql_path)
def get_sql_folder(self):
"""
This function will get the appropriate test SQL folder based on
the server type and version, provided such a folder exists.
:return: absolute path of the matching folder, or None if no
matching folder is found.
"""
# Get the absolute path of this tests folder.
tests_folder_path = os.path.dirname(os.path.abspath(__file__))
# A folder name matching the server type (pg, ppas) takes priority, so
# check whether it exists. If so, then look for the version folder in
# it, else look directly in the 'tests' folder.
absolute_path = os.path.join(tests_folder_path, self.server['type'])
if not os.path.exists(absolute_path):
absolute_path = tests_folder_path
# Iterate the version mapping directories.
for version_mapping in get_version_mapping_directories(
self.server['type']):
if version_mapping['number'] > \
self.server_information['server_version']:
continue
complete_path = os.path.join(absolute_path,
version_mapping['name'])
if os.path.exists(complete_path):
return complete_path
return None
def compare(self):
comp_url = self.url.format(self.trans_id, self.server_id,
self.src_db_id,
self.src_schema_id,
self.server_id,
self.tar_db_id,
self.tar_schema_id
)
response = self.tester.get(comp_url)
self.assertEquals(response.status_code, 200)
return json.loads(response.data.decode('utf-8'))
def runTest(self):
""" This function will test the schema diff."""
response = self.tester.get("schema_diff/initialize")
self.assertEquals(response.status_code, 200)
response_data = json.loads(response.data.decode('utf-8'))
self.trans_id = response_data['data']['schemaDiffTransId']
url = 'schema_diff/server/connect/{}'.format(self.server_id)
data = {'password': self.server['db_password']}
response = self.tester.post(url,
data=json.dumps(data),
content_type='html/json'
)
response = self.tester.post(
'schema_diff/database/connect/{0}/{1}'.format(
self.server_id,
self.src_db_id))
response = self.tester.post(
'schema_diff/database/connect/{0}/{1}'.format(
self.server_id,
self.tar_db_id))
response_data = self.compare()
diff_file = os.path.join(self.sql_folder, 'diff_{0}.sql'.format(
str(random.randint(1, 99999))))
file_obj = open(diff_file, 'a')
for diff in response_data['data']:
if diff['type'] in self.nodes:
src_obj_oid = tar_obj_oid = None
if diff['status'] == 'Source Only' or\
diff['status'] == 'Target Only':
src_obj_oid = tar_obj_oid = diff['oid']
elif diff['status'] == 'Different':
src_obj_oid = diff['source_oid']
tar_obj_oid = diff['target_oid']
if src_obj_oid is not None:
url = 'schema_diff/ddl_compare/{0}/{1}/{2}/{3}/{4}/{5}/' \
'{6}/{7}/{8}/{9}/{10}/'.format(self.trans_id,
self.server_id,
self.src_db_id,
self.src_schema_id,
self.server_id,
self.tar_db_id,
self.tar_schema_id,
src_obj_oid,
tar_obj_oid,
diff['type'],
diff['status']
)
response = self.tester.get(url)
self.assertEquals(response.status_code, 200)
response_data = json.loads(response.data.decode('utf-8'))
file_obj.write(response_data['diff_ddl'])
file_obj.close()
try:
restore_schema(self.server, self.tar_database, 'target',
diff_file)
os.remove(diff_file)
response_data = self.compare()
for diff in response_data['data']:
if diff['type'] in self.nodes:
self.assertEquals(diff['status'], 'Identical')
except Exception:
if os.path.exists(diff_file):
os.remove(diff_file)
raise
def tearDown(self):
"""This function drop the added database"""
connection = utils.get_db_connection(self.server['db'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'],
self.server['sslmode'])
utils.drop_database(connection, self.src_database)
connection = utils.get_db_connection(self.server['db'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'],
self.server['sslmode'])
utils.drop_database(connection, self.tar_database)
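
For reference, the end-to-end flow this test drives against the new schema diff endpoints can be condensed into the sketch below. It is only a summary of the calls made in runTest above: client stands in for the Flask test client (self.tester), run_schema_diff is a hypothetical helper, and the ids and password are placeholders.

# Hypothetical condensed sketch of the schema diff API flow exercised above.
# 'client' is assumed to behave like the Flask test client (self.tester).
import json

def run_schema_diff(client, sid, src_did, src_scid, tar_did, tar_scid, password):
    # 1. Initialize a schema diff transaction and remember its id.
    res = client.get('schema_diff/initialize')
    trans_id = json.loads(res.data.decode('utf-8'))['data']['schemaDiffTransId']

    # 2. Connect the server, then the source and target databases.
    client.post('schema_diff/server/connect/{0}'.format(sid),
                data=json.dumps({'password': password}),
                content_type='html/json')
    client.post('schema_diff/database/connect/{0}/{1}'.format(sid, src_did))
    client.post('schema_diff/database/connect/{0}/{1}'.format(sid, tar_did))

    # 3. Compare the two schemas and return the list of differences.
    res = client.get('schema_diff/compare/{0}/{1}/{2}/{3}/{4}/{5}/{6}'.format(
        trans_id, sid, src_did, src_scid, sid, tar_did, tar_scid))
    return json.loads(res.data.decode('utf-8'))['data']

Each entry returned carries a status ('Source Only', 'Target Only', 'Different' or 'Identical'), which runTest then feeds into the ddl_compare endpoint to build the difference script.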

View File

@ -0,0 +1,103 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import sys
import traceback
from regression.python_test_utils import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
def restore_schema(server, db_name, schema_name, sql_path):
"""Restore the given SQL file into the database and return the oid
of the restored schema (None if the schema is not found)."""
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'],
server['sslmode']
)
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
sql = ''
with open(sql_path, 'r') as content_file:
sql = content_file.read()
pg_cursor.execute(sql)
connection.set_isolation_level(old_isolation_level)
connection.commit()
SQL = """SELECT
nsp.oid
FROM
pg_namespace nsp
WHERE nsp.nspname = '{0}'""".format(schema_name)
pg_cursor.execute(SQL)
schema = pg_cursor.fetchone()
schema_id = None
if schema:
schema_id = schema[0]
connection.close()
return schema_id
def create_schema(server, db_name, schema_name):
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'],
server['sslmode']
)
return schema_utils.create_schema(connection, schema_name)
def create_table(server, db_name, schema_id, table_name, query):
"""
This function creates a table under the provided schema.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param schema_id: schema oid
:type schema_id: int
:param table_name: table name
:type table_name: str
:param query: SQL query used to create the table
:type query: str
:return table_id: table id
:rtype: int
"""
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'],
server['sslmode'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
pg_cursor.execute(query)
connection.set_isolation_level(old_isolation_level)
connection.commit()
# Get 'oid' from newly created table
pg_cursor.execute("SELECT oid FROM pg_class WHERE relname='{0}'"
" AND relnamespace = {1}".format(table_name,
schema_id))
table = pg_cursor.fetchone()
table_id = ''
if table:
table_id = table[0]
connection.close()
return table_id
except Exception:
traceback.print_exc(file=sys.stderr)
raise

View File

@ -84,6 +84,8 @@ define('tools.querytool', [
this.handler.preferences = this.preferences;
this.connIntervalId = null;
this.layout = opts.layout;
this.set_server_version(opts.server_ver);
this.trigger('pgadmin-sqleditor:view:initialised');
},
// Bind all the events
@ -2318,6 +2320,12 @@ define('tools.querytool', [
}
},
set_value_to_editor: function(query) {
if (this.gridView && this.gridView.query_tool_obj && !_.isUndefined(query)) {
this.gridView.query_tool_obj.setValue(query);
}
},
init_events: function() {
var self = this;
// Listen to the file manager button events

View File

@ -4,5 +4,5 @@ SELECT at.attname, at.attnum, ty.typname
FROM pg_attribute at LEFT JOIN pg_type ty ON (ty.oid = at.atttypid)
WHERE attrelid={{obj_id}}::oid AND attnum = ANY (
(SELECT con.conkey FROM pg_class rel LEFT OUTER JOIN pg_constraint con ON con.conrelid=rel.oid
AND con.contype='p' WHERE rel.relkind IN ('r','s','t') AND rel.oid = {{obj_id}}::oid)::oid[])
AND con.contype='p' WHERE rel.relkind IN ('r','s','t') AND rel.oid = ({{obj_id}})::oid)::oid[])
{% endif %}
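
The only functional change in this template is wrapping the substituted {{obj_id}} in parentheses before the ::oid cast, so the cast applies to the whole rendered value. A minimal illustration of the rendered fragment, using plain jinja2 purely for the demo (pgAdmin renders this file through its own template loader):

# Hypothetical rendering demo only; the real template is loaded by pgAdmin.
from jinja2 import Template

fragment = Template("AND rel.oid = ({{ obj_id }})::oid")
print(fragment.render(obj_id=149176))  # AND rel.oid = (149176)::oid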

View File

@ -35,12 +35,13 @@ def get_columns_types(is_query_tool, columns_info, table_oid, conn, has_oids):
column_types[col['name']] = col_type
if not is_query_tool:
col_type['not_null'] = col['not_null'] = \
rset['rows'][key]['not_null']
if key in rset['rows']:
col_type['not_null'] = col['not_null'] = \
rset['rows'][key]['not_null']
col_type['has_default_val'] = \
col['has_default_val'] = \
rset['rows'][key]['has_default_val']
col_type['has_default_val'] = \
col['has_default_val'] = \
rset['rows'][key]['has_default_val']
else:
for row in rset['rows']:
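
The fix above guards the per-column lookup so that a key missing from the fetched rows is simply skipped instead of raising a KeyError. A minimal sketch of the pattern, with placeholder names rather than pgAdmin's actual result structures:

# Only copy the column metadata when the key is actually present.
rows = {'id': {'not_null': True, 'has_default_val': False}}
col_type = {}

key = 'missing_column'  # a column that is not in the fetched rows
if key in rows:
    col_type['not_null'] = rows[key]['not_null']
    col_type['has_default_val'] = rows[key]['has_default_val']

print(col_type)  # {} -- nothing copied and no KeyError raised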

View File

@ -34,6 +34,8 @@ class _PGCSRFProtect(CSRFProtect):
'pgadmin.tools.datagrid.panel',
'pgadmin.tools.debugger.initialize_target',
'pgadmin.tools.debugger.direct_new',
'pgadmin.tools.schema_diff.panel',
'pgadmin.tools.schema_diff.ddl_compare',
]
for exempt in exempt_views:

View File

@ -356,6 +356,7 @@ module.exports = [{
slickgrid: sourceDir + '/bundle/slickgrid.js',
sqleditor: './pgadmin/tools/sqleditor/static/js/sqleditor.js',
debugger_direct: './pgadmin/tools/debugger/static/js/direct.js',
schema_diff: './pgadmin/tools/schema_diff/static/js/schema_diff_hook.js',
file_utils: './pgadmin/misc/file_manager/static/js/utility.js',
'pgadmin.style': pgadminCssStyles,
pgadmin: pgadminScssStyles,
@ -492,7 +493,8 @@ module.exports = [{
',pgadmin.tools.import_export' +
',pgadmin.tools.debugger.controller' +
',pgadmin.tools.debugger.direct' +
',pgadmin.node.pga_job',
',pgadmin.node.pga_job' +
',pgadmin.tools.schema_diff',
},
}, {
test: require.resolve('snapsvg'),

View File

@ -278,6 +278,8 @@ var webpackShimConfig = {
'pgadmin.tools.import_export': path.join(__dirname, './pgadmin/tools/import_export/static/js/import_export'),
'pgadmin.tools.maintenance': path.join(__dirname, './pgadmin/tools/maintenance/static/js/maintenance'),
'pgadmin.tools.restore': path.join(__dirname, './pgadmin/tools/restore/static/js/restore'),
'pgadmin.tools.schema_diff': path.join(__dirname, './pgadmin/tools/schema_diff/static/js/schema_diff'),
'pgadmin.tools.schema_diff_ui': path.join(__dirname, './pgadmin/tools/schema_diff/static/js/schema_diff_ui'),
'pgadmin.tools.user_management': path.join(__dirname, './pgadmin/tools/user_management/static/js/user_management'),
'pgadmin.user_management.current_user': '/user_management/current_user',
'slick.pgadmin.editors': path.join(__dirname, './pgadmin/tools/../static/js/slickgrid/editors'),