Improve logic to get the DDL statements as a part of the comparison. Fixes #5221

Fixed 23 issues related to schema diff.
Akshay Joshi 2020-03-15 14:51:16 +05:30
parent 0b101d9efd
commit 44c0d76541
52 changed files with 951 additions and 921 deletions

View File

@ -19,4 +19,5 @@ Bug fixes
| `Issue #4237 <https://redmine.postgresql.org/issues/4237>`_ - Fix an issue where the user can not change the value of DateTime picker control using keyboard.
| `Issue #4942 <https://redmine.postgresql.org/issues/4942>`_ - Fixed chrome driver download utility issue for Ubuntu.
| `Issue #5143 <https://redmine.postgresql.org/issues/5143>`_ - Fix an accessibility issue to maximize the panel for all alertify dialog.
| `Issue #5221 <https://redmine.postgresql.org/issues/5221>`_ - Improve logic to get the DDL statements as a part of the comparison.

View File

@ -220,7 +220,8 @@ class FunctionView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
})
keys_to_ignore = ['oid', 'proowner', 'typnsp', 'xmin', 'prokind',
'proisagg', 'pronamespace', 'proargdefaults']
'proisagg', 'pronamespace', 'proargdefaults',
'prorettype', 'proallargtypes', 'proacl']
@property
def required_args(self):
@ -1125,7 +1126,8 @@ class FunctionView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
SQL = re.sub('\n{2,}', '\n\n', SQL)
return SQL
def _get_sql(self, gid, sid, did, scid, data, fnid=None, is_sql=False):
def _get_sql(self, gid, sid, did, scid, data, fnid=None, is_sql=False,
is_schema_diff=False):
"""
Generates the SQL statements to create/update the Function.
@ -1239,8 +1241,11 @@ class FunctionView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
for v in data['variables']['added']:
chngd_variables[v['name']] = v['value']
for v in old_data['variables']:
old_data['chngd_variables'][v['name']] = v['value']
# In case of schema diff we don't want variables from
# old data
if not is_schema_diff:
for v in old_data['variables']:
old_data['chngd_variables'][v['name']] = v['value']
# Prepare final dict of new and old variables
for name, val in old_data['chngd_variables'].items():
@ -1624,7 +1629,15 @@ class FunctionView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
if data:
if diff_schema:
data['schema'] = diff_schema
status, sql = self._get_sql(gid, sid, did, scid, data, oid)
status, sql = self._get_sql(gid, sid, did, scid, data, oid, False,
True)
# Check if return type is changed then we need to drop the
# function first and then recreate it.
drop_fun_sql = ''
if 'prorettypename' in data:
drop_fun_sql = self.delete(gid=gid, sid=sid, did=did,
scid=scid, fnid=oid, only_sql=True)
sql = drop_fun_sql + '\n' + sql
else:
if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did,
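The hunk above drops and recreates a function whenever the comparison detects a changed return type, because PostgreSQL cannot alter the return type of an existing function in place. A minimal, self-contained sketch of that decision (the helper and argument names are hypothetical, not the pgAdmin API):

# Simplified sketch of the drop-and-recreate rule; names are illustrative only.
def build_function_diff_sql(diff_data, create_sql, drop_sql):
    """Prepend a DROP when the return type differs between the two schemas."""
    if 'prorettypename' in diff_data:
        # PostgreSQL cannot change a function's return type, so drop it first
        # and let the following CREATE OR REPLACE rebuild it.
        return drop_sql + '\n' + create_sql
    return create_sql


print(build_function_diff_sql(
    {'prorettypename': 'bigint'},
    'CREATE OR REPLACE FUNCTION public.fn() RETURNS bigint ...;',
    'DROP FUNCTION public.fn();'))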

View File

@ -18,7 +18,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}

View File

@ -19,7 +19,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -31,7 +32,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if 'proparallel' in data and data.proparallel %}PARALLEL {{ data.proparallel }}{% elif 'proparallel' not in data and o_data.proparallel %}PARALLEL {{ o_data.proparallel }}{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0'%}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}} {%endif -%}{% if data.merged_variables %}{% for v in data.merged_variables %}
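Every create/update template touched in this commit gets the same two tweaks: the RETURNS clause now prefers the changed value in data and falls back to o_data, and ROWS is suppressed when the estimate is '0'. A rough Python equivalent of those conditionals (data and o_data are stand-in dicts, not the real template context, and the ROWS helper is an approximation):

def returns_clause(data, o_data):
    # Mirror of: {% if 'prorettypename' in data %} ... {% else %} ... {% endif %}
    return 'RETURNS ' + data.get('prorettypename', o_data['prorettypename'])


def rows_clause(data, o_data):
    # Approximation of the ROWS handling: emit ROWS only for a non-zero estimate.
    rows = data.get('prorows') or o_data.get('prorows')
    return ' ROWS ' + rows if rows and rows != '0' else ''


print(returns_clause({'prorettypename': 'bigint'}, {'prorettypename': 'integer'}))
print(rows_clause({}, {'prorows': '0'}))        # '' -> ROWS 0 is no longer emitted
print(rows_clause({'prorows': '1000'}, {}))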

View File

@ -19,7 +19,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -31,7 +32,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if 'proparallel' in data and data.proparallel %}PARALLEL {{ data.proparallel }}{% elif 'proparallel' not in data and o_data.proparallel %}PARALLEL {{ o_data.proparallel }}{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}} {%endif %}

View File

@ -18,7 +18,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -28,7 +29,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if ('prosecdef' in data and data.prosecdef) or ('prosecdef' not in data and o_data.prosecdef) %} SECURITY DEFINER{% endif %}
{% if ('proiswindow' in data and data.proiswindow) or ('proiswindow' not in data and o_data.proiswindow) %} WINDOW{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}}{% endif -%}{% if data.merged_variables %}{% for v in data.merged_variables %}

View File

@ -19,7 +19,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -29,7 +30,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if ('prosecdef' in data and data.prosecdef) or ('prosecdef' not in data and o_data.prosecdef) %} SECURITY DEFINER{% endif %}
{% if ('proiswindow' in data and data.proiswindow) or ('proiswindow' not in data and o_data.proiswindow) %} WINDOW{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}} {%endif -%}{% if data.merged_variables %}{% for v in data.merged_variables %}

View File

@ -19,7 +19,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -31,7 +32,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if 'proparallel' in data and data.proparallel %}PARALLEL {{ data.proparallel }}{% elif 'proparallel' not in data and o_data.proparallel %}PARALLEL {{ o_data.proparallel }}{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}} {%endif -%}{% if data.merged_variables %}{% for v in data.merged_variables %}

View File

@ -18,7 +18,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -27,7 +28,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if ('prosecdef' in data and data.prosecdef) or ('prosecdef' not in data and o_data.prosecdef) %} SECURITY DEFINER{% endif %}
{% if ('proiswindow' in data and data.proiswindow) or ('proiswindow' not in data and o_data.proiswindow) %} WINDOW{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}}{%endif -%}{% if data.merged_variables %}{% for v in data.merged_variables %}

View File

@ -19,7 +19,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -31,7 +32,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if 'proparallel' in data and data.proparallel %}PARALLEL {{ data.proparallel }}{% elif 'proparallel' not in data and o_data.proparallel %}PARALLEL {{ o_data.proparallel }}{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}} {%endif %}

View File

@ -18,7 +18,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -28,7 +29,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if ('prosecdef' in data and data.prosecdef) or ('prosecdef' not in data and o_data.prosecdef) %} SECURITY DEFINER{% endif %}
{% if ('proiswindow' in data and data.proiswindow) or ('proiswindow' not in data and o_data.proiswindow) %} WINDOW{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}}{% endif -%}{% if data.merged_variables %}{% for v in data.merged_variables %}

View File

@ -19,7 +19,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -29,7 +30,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if ('prosecdef' in data and data.prosecdef) or ('prosecdef' not in data and o_data.prosecdef) %} SECURITY DEFINER{% endif %}
{% if ('proiswindow' in data and data.proiswindow) or ('proiswindow' not in data and o_data.proiswindow) %} WINDOW{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}} {%endif -%}{% if data.merged_variables %}{% for v in data.merged_variables %}

View File

@ -19,7 +19,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -31,7 +32,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if 'proparallel' in data and data.proparallel %}PARALLEL {{ data.proparallel }}{% elif 'proparallel' not in data and o_data.proparallel %}PARALLEL {{ o_data.proparallel }}{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}} {%endif -%}{% if data.merged_variables %}{% for v in data.merged_variables %}

View File

@ -18,7 +18,8 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% endfor %}
{% endif -%}
)
RETURNS {{ o_data.prorettypename }}
RETURNS {% if 'prorettypename' in data %}{{ data.prorettypename }}{% else %}{{ o_data.prorettypename }}{% endif %}
{% if 'lanname' in data %}
LANGUAGE {{ data.lanname|qtLiteral }} {% else %}
LANGUAGE {{ o_data.lanname|qtLiteral }}
@ -27,7 +28,7 @@ CREATE OR REPLACE FUNCTION {{ conn|qtIdent(o_data.pronamespace, name) }}({% if d
{% if ('prosecdef' in data and data.prosecdef) or ('prosecdef' not in data and o_data.prosecdef) %} SECURITY DEFINER{% endif %}
{% if ('proiswindow' in data and data.proiswindow) or ('proiswindow' not in data and o_data.proiswindow) %} WINDOW{% endif %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows %}
{% if data.procost %}COST {{data.procost}}{% elif o_data.procost %}COST {{o_data.procost}}{% endif %}{% if data.prorows and data.prorows != '0' %}
ROWS {{data.prorows}}{% elif data.prorows is not defined and o_data.prorows and o_data.prorows != '0' %} ROWS {{o_data.prorows}}{%endif -%}{% if data.merged_variables %}{% for v in data.merged_variables %}

View File

@ -11,8 +11,6 @@
import simplejson as json
import re
import copy
import random
import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, request, jsonify, url_for, current_app
@ -25,11 +23,6 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
from .utils import BaseTableView
from pgadmin.utils.preferences import Preferences
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.foreign_key import utils as fkey_utils
from .schema_diff_utils import SchemaDiffTableCompare
@ -1212,8 +1205,6 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings,
json_resp = kwargs['json_resp'] if 'json_resp' in kwargs else True
diff_schema = kwargs['diff_schema'] if 'diff_schema' in kwargs else\
None
schema_diff_table = kwargs['schema_diff_table'] if\
'schema_diff_table' in kwargs else None
if diff_data:
return self._fetch_sql(did, scid, tid, diff_data, json_resp)
@ -1239,24 +1230,8 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings,
if diff_schema:
data['schema'] = diff_schema
if schema_diff_table:
data['orig_name'] = data['name']
data['name'] = 'schema_diff_temp_{0}'.format(
random.randint(1, 9999999))
sql, partition_sql = BaseTableView.get_reverse_engineered_sql(
self, did, scid, tid, main_sql, data, json_resp,
diff_partition_sql=True)
else:
sql, partition_sql = BaseTableView.get_reverse_engineered_sql(
self, did, scid, tid, main_sql, data, json_resp)
if schema_diff_table:
# If partition tables have different partitions
sql += render_template(
"/".join([self.table_template_path, 'schema_diff.sql']),
conn=self.conn, data=data, partition_sql=partition_sql
)
sql, partition_sql = BaseTableView.get_reverse_engineered_sql(
self, did, scid, tid, main_sql, data, json_resp)
return sql
@ -1625,7 +1600,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings,
return sql
@BaseTableView.check_precondition
def fetch_tables(self, sid, did, scid, tid=None, keys_to_remove=None):
def fetch_tables(self, sid, did, scid, tid=None):
"""
This function will fetch the list of all the tables
and will be used by schema diff.
@ -1634,9 +1609,13 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings,
:param did: Database Id
:param scid: Schema Id
:param tid: Table Id
:param keys_to_remove: Table columns to be removed from the dataset
:return: Table dataset
"""
sub_modules = ['index', 'rule', 'trigger']
if self.manager.server_type == 'ppas' and \
self.manager.version >= 120000:
sub_modules.append('compound_trigger')
if tid:
status, data = self._fetch_properties(did, scid, tid)
@ -1647,7 +1626,7 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings,
data = super(TableView, self).properties(
0, sid, did, scid, tid, data, False
)
self.remove_keys_for_comparision(data, keys_to_remove)
return data
else:
@ -1667,7 +1646,17 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings,
0, sid, did, scid, row['oid'], data, False
)
self.remove_keys_for_comparision(data, keys_to_remove)
# Get sub module data of a specified table for object
# comparison
for module in sub_modules:
module_view = SchemaDiffRegistry.get_node_view(module)
if module_view.blueprint.server_type is None or \
self.manager.server_type in \
module_view.blueprint.server_type:
sub_data = module_view.fetch_objects_to_compare(
sid=sid, did=did, scid=scid, tid=row['oid'],
oid=None)
data[module] = sub_data
res[row['name']] = data
return res
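The fetch_tables change above attaches each sub-module's objects (index, rule, trigger, and compound_trigger where supported) to the table dictionary so a single dataset carries everything needed for comparison. A rough sketch of that aggregation pattern (SchemaDiffRegistry and the view API are pgAdmin internals; the helpers below are made up):

# Illustrative stand-ins; the real code resolves views via SchemaDiffRegistry.
def collect_table_with_submodules(table_data, sub_module_fetchers):
    """Attach each sub-module's objects to the table dict for comparison."""
    for name, fetch in sub_module_fetchers.items():
        table_data[name] = fetch(table_data['oid'])
    return table_data


table = collect_table_with_submodules(
    {'name': 'orders', 'oid': 16385},
    {'index': lambda tid: {'orders_pkey': {'indisunique': True}},
     'rule': lambda tid: {},
     'trigger': lambda tid: {}})
print(sorted(table.keys()))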

View File

@ -29,6 +29,8 @@ from pgadmin.utils import IS_PY2
from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
from pgadmin.tools.schema_diff.directory_compare import directory_diff,\
parce_acl
# If we are in Python3
@ -938,8 +940,7 @@ class CompoundTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
return SQL
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
ignore_keys=False):
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None):
"""
This function will fetch the list of all the triggers for
specified schema id.
@ -969,14 +970,60 @@ class CompoundTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
for row in triggers['rows']:
status, data = self._fetch_properties(tid, row['oid'])
if status:
if ignore_keys:
for key in self.keys_to_ignore:
if key in data:
del data[key]
res[row['name']] = data
return res
def ddl_compare(self, **kwargs):
"""
This function returns the DDL/DML statements based on the
comparison status.
:param kwargs:
:return:
"""
src_params = kwargs.get('source_params')
tgt_params = kwargs.get('target_params')
source = kwargs.get('source')
target = kwargs.get('target')
target_schema = kwargs.get('target_schema')
comp_status = kwargs.get('comp_status')
diff = ''
if comp_status == 'source_only':
diff = self.get_sql_from_diff(gid=src_params['gid'],
sid=src_params['sid'],
did=src_params['did'],
scid=src_params['scid'],
tid=src_params['tid'],
oid=source['oid'],
diff_schema=target_schema)
elif comp_status == 'target_only':
diff = self.get_sql_from_diff(gid=tgt_params['gid'],
sid=tgt_params['sid'],
did=tgt_params['did'],
scid=tgt_params['scid'],
tid=tgt_params['tid'],
oid=target['oid'],
drop_sql=True)
elif comp_status == 'different':
diff_dict = directory_diff(
source, target,
ignore_keys=self.keys_to_ignore, difference={}
)
diff_dict.update(parce_acl(source, target))
diff = self.get_sql_from_diff(gid=tgt_params['gid'],
sid=tgt_params['sid'],
did=tgt_params['did'],
scid=tgt_params['scid'],
tid=tgt_params['tid'],
oid=target['oid'],
data=diff_dict)
return diff
SchemaDiffRegistry(blueprint.node_type, CompoundTriggerView, 'table')
CompoundTriggerView.register_node_view(blueprint)
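The ddl_compare method added above follows the same three-way dispatch used by the other sub-modules in this commit. A condensed sketch of that pattern (the callables are hypothetical placeholders for the real get_sql_from_diff calls):

def ddl_for_comparison(comp_status, make_create, make_drop, make_update):
    """Return DDL for one object based on its comparison status."""
    if comp_status == 'source_only':    # only in source: create it in the target schema
        return make_create()
    if comp_status == 'target_only':    # only in target: drop it from the target
        return make_drop()
    if comp_status == 'different':      # present in both but different: alter it
        return make_update()
    return ''                           # identical: nothing to emit


print(ddl_for_comparison('target_only',
                         lambda: 'CREATE TRIGGER ...;',
                         lambda: 'DROP TRIGGER ...;',
                         lambda: 'CREATE OR REPLACE ... / ALTER ...;'))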

View File

@ -82,10 +82,10 @@ def get_foreign_keys(conn, tid, fkid=None, template_path=None):
coveringindex = search_coveringindex(conn, tid, cols)
fk['coveringindex'] = coveringindex
if coveringindex:
fk['autoindex'] = True
fk['autoindex'] = False
fk['hasindex'] = True
else:
fk['autoindex'] = False
fk['autoindex'] = True
fk['hasindex'] = False
return True, result['rows']
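A minimal sketch mirroring the corrected assignments above: an existing covering index means the foreign key already has an index (hasindex), so no automatic index is flagged (autoindex), and vice versa. The helper name and example index are illustrative:

def set_fk_index_flags(fk, coveringindex):
    """Mirror the corrected flag assignments for a foreign key's covering index."""
    fk['coveringindex'] = coveringindex
    fk['hasindex'] = bool(coveringindex)
    fk['autoindex'] = not coveringindex
    return fk


print(set_fk_index_flags({}, 'fki_orders_customer_id'))
print(set_fk_index_flags({}, None))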

View File

@ -26,9 +26,7 @@ from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils import IS_PY2
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from pgadmin.tools.schema_diff.directory_compare import directory_diff
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
from pgadmin.browser.server_groups.servers.databases.schemas. \
tables.indexes import utils as index_utils
@ -830,11 +828,8 @@ class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
def get_sql_from_index_diff(self, sid, did, scid, tid, idx, data=None,
diff_schema=None, drop_req=False):
tmp_idx = idx
schema = ''
sql = ''
if data:
schema = self.schema
data['schema'] = self.schema
data['nspname'] = self.schema
data['table'] = self.table
@ -987,8 +982,7 @@ class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
ignore_keys=False):
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None):
"""
This function will fetch the list of all the indexes for
specified schema id.
@ -996,6 +990,7 @@ class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
:param sid: Server Id
:param did: Database Id
:param scid: Schema Id
:param oid: Index Id
:return:
"""
@ -1013,10 +1008,6 @@ class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
status, data = self._fetch_properties(did, tid,
row['oid'])
if status:
if ignore_keys:
for key in self.keys_to_ignore:
if key in data:
del data[key]
res[row['name']] = data
else:
status, data = self._fetch_properties(did, tid,
@ -1030,55 +1021,35 @@ class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
def ddl_compare(self, **kwargs):
"""
This function will compare index properties and
return the difference of SQL
This function returns the DDL/DML statements based on the
comparison status.
:param kwargs:
:return:
"""
src_sid = kwargs.get('source_sid')
src_did = kwargs.get('source_did')
src_scid = kwargs.get('source_scid')
src_tid = kwargs.get('source_tid')
src_oid = kwargs.get('source_oid')
tar_sid = kwargs.get('target_sid')
tar_did = kwargs.get('target_did')
tar_scid = kwargs.get('target_scid')
tar_tid = kwargs.get('target_tid')
tar_oid = kwargs.get('target_oid')
src_params = kwargs.get('source_params')
tgt_params = kwargs.get('target_params')
source = kwargs.get('source')
target = kwargs.get('target')
target_schema = kwargs.get('target_schema')
comp_status = kwargs.get('comp_status')
source = ''
target = ''
diff = ''
status, target_schema = self.get_schema(tar_sid,
tar_did,
tar_scid
)
if not status:
return internal_server_error(errormsg=target_schema)
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
diff = self.get_sql_from_index_diff(sid=src_sid,
did=src_did, scid=src_scid,
tid=src_tid, idx=src_oid,
if comp_status == 'source_only':
diff = self.get_sql_from_index_diff(sid=src_params['sid'],
did=src_params['did'],
scid=src_params['scid'],
tid=src_params['tid'],
idx=source['oid'],
diff_schema=target_schema)
elif comp_status == 'target_only':
diff = self.delete(gid=1, sid=tgt_params['sid'],
did=tgt_params['did'], scid=tgt_params['scid'],
tid=tgt_params['tid'], idx=target['oid'],
only_sql=True)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
diff = self.delete(gid=1, sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_tid,
idx=tar_oid, only_sql=True)
else:
source = self.fetch_objects_to_compare(sid=src_sid, did=src_did,
scid=src_scid, tid=src_tid,
oid=src_oid)
target = self.fetch_objects_to_compare(sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_tid,
oid=tar_oid)
if not (source or target):
return None
elif comp_status == 'different':
diff_dict = directory_diff(
source, target, ignore_keys=self.keys_to_ignore,
difference={}
@ -1102,19 +1073,19 @@ class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
create_req = True
if create_req:
diff = self.get_sql_from_index_diff(sid=tar_sid,
did=tar_did,
scid=tar_scid,
tid=tar_tid,
idx=tar_oid,
diff = self.get_sql_from_index_diff(sid=tgt_params['sid'],
did=tgt_params['did'],
scid=tgt_params['scid'],
tid=tgt_params['tid'],
idx=target['oid'],
diff_schema=target_schema,
drop_req=True)
else:
diff = self.get_sql_from_index_diff(sid=tar_sid,
did=tar_did,
scid=tar_scid,
tid=tar_tid,
idx=tar_oid,
diff = self.get_sql_from_index_diff(sid=tgt_params['sid'],
did=tgt_params['did'],
scid=tgt_params['scid'],
tid=tgt_params['tid'],
idx=target['oid'],
data=diff_dict)
return diff
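The rewritten index ddl_compare chooses between an in-place update and a drop-and-recreate (drop_req=True) depending on which properties differ. A standalone sketch of that decision; the key names below are assumed examples of properties that cannot be changed with ALTER INDEX, not the actual list used by pgAdmin:

# Hypothetical illustration only.
NON_ALTERABLE = {'amname', 'columns', 'indisunique'}


def index_requires_recreate(diff_dict):
    """A difference in any non-alterable property forces drop-and-recreate."""
    return any(key in diff_dict for key in NON_ALTERABLE)


print(index_requires_recreate({'description': 'changed comment'}))  # False -> alter only
print(index_requires_recreate({'indisunique': True}))               # True  -> drop + create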

View File

@ -10,9 +10,10 @@
""" Implements Partitions Node """
import re
import random
import simplejson as json
import pgadmin.browser.server_groups.servers.databases.schemas as schema
from flask import render_template, request
from flask import render_template, request, current_app
from flask_babelex import gettext
from pgadmin.browser.server_groups.servers.databases.schemas.utils \
import DataTypeReader, VacuumSettings
@ -21,13 +22,9 @@ from pgadmin.utils.ajax import internal_server_error, \
from pgadmin.browser.server_groups.servers.databases.schemas.tables.utils \
import BaseTableView
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.utils.ajax import make_json_response, precondition_required
from config import PG_DEFAULT_DRIVER
from pgadmin.utils.ajax import make_json_response
from pgadmin.browser.utils import PGChildModule
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.model import SchemaDiffModel
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
@ -464,58 +461,57 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings,
@BaseTableView.check_precondition
def get_sql_from_diff(self, **kwargs):
"""
This function will create sql on the basis the difference of 2 tables
This function is used to get the DDL/DML statements for
partitions.
:param kwargs:
:return:
"""
data = dict()
res = None
sid = kwargs['sid']
did = kwargs['did']
scid = kwargs['scid']
tid = kwargs['tid']
ptid = kwargs['ptid']
diff_data = kwargs['diff_data'] if 'diff_data' in kwargs else None
json_resp = kwargs['json_resp'] if 'json_resp' in kwargs else True
diff_schema = kwargs['diff_schema'] if 'diff_schema' in kwargs else\
None
if diff_data:
SQL = render_template("/".join([self.partition_template_path,
'properties.sql']),
did=did, scid=scid, tid=tid,
ptid=ptid, datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
source_data = kwargs['source_data'] if 'source_data' in kwargs \
else None
target_data = kwargs['target_data'] if 'target_data' in kwargs \
else None
SQL, name = self.get_sql(did, scid, ptid, diff_data, res)
SQL = re.sub('\n{2,}', '\n\n', SQL)
SQL = SQL.strip('\n')
return SQL
else:
main_sql = []
# Store the original name and create a temporary name for
# the partitioned(base) table.
target_data['orig_name'] = target_data['name']
target_data['name'] = 'temp_partitioned_{0}'.format(
random.randint(1, 9999999))
# For PG/EPAS 11 and above when we copy the data from original
# table to temporary table for schema diff, we will have to create
# a default partition to prevent the data loss.
target_data['default_partition_name'] = \
target_data['orig_name'] + '_default'
SQL = render_template("/".join([self.partition_template_path,
'properties.sql']),
did=did, scid=scid, tid=tid,
ptid=ptid, datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
# Copy the partition scheme from source to target.
if 'partition_scheme' in source_data:
target_data['partition_scheme'] = source_data['partition_scheme']
if len(res['rows']) == 0:
return gone(gettext(
"The specified partitioned table could not be found."))
partition_data = dict()
partition_data['name'] = target_data['name']
partition_data['schema'] = target_data['schema']
partition_data['partition_type'] = source_data['partition_type']
partition_data['default_partition_header'] = \
'-- Create a default partition to prevent the data loss.\n' \
'-- It helps when none of the partitions of a relation\n' \
'-- matches the inserted data.'
data = res['rows'][0]
# Create temporary name for partitions
for item in source_data['partitions']:
item['temp_partition_name'] = 'partition_{0}'.format(
random.randint(1, 9999999))
if diff_schema:
data['schema'] = diff_schema
data['parent_schema'] = diff_schema
partition_data['partitions'] = source_data['partitions']
return BaseTableView.get_reverse_engineered_sql(self, did,
scid, ptid,
main_sql, data,
False)
partition_sql = self.get_partitions_sql(partition_data,
schema_diff=True)
return render_template(
"/".join([self.partition_template_path, 'partition_diff.sql']),
conn=self.conn, data=target_data, partition_sql=partition_sql,
partition_data=partition_data
)
@BaseTableView.check_precondition
def detach(self, gid, sid, did, scid, tid, ptid):
@ -775,58 +771,25 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings,
def ddl_compare(self, **kwargs):
"""
This function will compare index properties and
return the difference of SQL
This function returns the DDL/DML statements based on the
comparison status.
:param kwargs:
:return:
"""
src_sid = kwargs.get('source_sid')
src_did = kwargs.get('source_did')
src_scid = kwargs.get('source_scid')
src_tid = kwargs.get('source_tid')
src_oid = kwargs.get('source_oid')
tar_sid = kwargs.get('target_sid')
tar_did = kwargs.get('target_did')
tar_scid = kwargs.get('target_scid')
tar_tid = kwargs.get('target_tid')
tar_oid = kwargs.get('target_oid')
comp_status = kwargs.get('comp_status')
tgt_params = kwargs.get('target_params')
parent_source_data = kwargs.get('parent_source_data')
parent_target_data = kwargs.get('parent_target_data')
source = ''
target = ''
diff = ''
diff = self.get_sql_from_diff(sid=tgt_params['sid'],
did=tgt_params['did'],
scid=tgt_params['scid'],
tid=tgt_params['tid'],
source_data=parent_source_data,
target_data=parent_target_data)
status, target_schema = self.get_schema_for_schema_diff(tar_sid,
tar_did,
tar_scid
)
if not status:
return internal_server_error(errormsg=target_schema)
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
diff = self.get_sql_from_diff(sid=src_sid,
did=src_did, scid=src_scid,
tid=src_tid, ptid=src_oid,
diff_schema=target_schema)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
SQL = render_template("/".join([self.partition_template_path,
'properties.sql']),
did=did, scid=scid, tid=tid,
ptid=ptid, datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
SQL = render_template(
"/".join([self.table_template_path, 'properties.sql']),
did=tar_did, scid=tar_scid, tid=tar_oid,
datlastsysoid=self.datlastsysoid
)
status, res = self.conn.execute_dict(SQL)
if status:
self.cmd = 'delete'
diff = super(PartitionsView, self).get_delete_sql(res)
self.cmd = None
return diff
return diff + '\n'
SchemaDiffRegistry(blueprint.node_type, PartitionsView, 'table')
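When the partition scheme itself differs, the code above no longer tries to alter the existing table; it emits a swap script driven by the new partition_diff.sql template shown later in this commit. A condensed, hypothetical example of the statement sequence that script produces (table and column names are made up):

# The real script also renames each source partition into place and, on
# PG/EPAS 11 and above, adds a DEFAULT partition so no rows are lost while
# copying the data into the temporary partitioned table.
swap_script = (
    "CREATE TABLE public.temp_partitioned_1234 (\n"
    "    LIKE public.sales INCLUDING ALL\n"
    ") PARTITION BY RANGE (sale_date);\n"
    "-- CREATE TABLE ... PARTITION OF public.temp_partitioned_1234 ... (one per partition)\n"
    "INSERT INTO public.temp_partitioned_1234 (id, sale_date, amount)\n"
    "    SELECT id, sale_date, amount FROM public.sales;\n"
    "DROP TABLE public.sales;\n"
    "ALTER TABLE public.temp_partitioned_1234 RENAME TO sales;\n"
)
print(swap_script)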

View File

@ -28,6 +28,8 @@ from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils import IS_PY2
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
from pgadmin.tools.schema_diff.directory_compare import directory_diff,\
parce_acl
# If we are in Python3
if not IS_PY2:
@ -520,7 +522,8 @@ class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
@check_precondition
def get_sql_from_diff(self, gid, sid, did, scid, tid, oid, data=None,
diff_schema=None, drop_sql=False):
source_schema=None, diff_schema=None,
drop_sql=False):
if drop_sql:
SQL = self.delete(gid=gid, sid=sid, did=did,
@ -541,6 +544,11 @@ class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
SQL = ''
if data:
if source_schema:
if 'statements' in data:
# Replace the source schema with the target schema
data['statements'] = data['statements'].replace(
source_schema, diff_schema)
old_data = res_data
SQL = render_template(
"/".join([self.template_path, 'update.sql']),
@ -548,6 +556,11 @@ class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
)
else:
if diff_schema:
if 'statements' in res_data:
# Replace the source schema with the target schema
res_data['statements'] = \
res_data['statements'].replace(
res_data['schema'], diff_schema)
res_data['schema'] = diff_schema
SQL = render_template("/".join(
@ -595,8 +608,7 @@ class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
ignore_keys=False):
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None):
"""
This function will fetch the list of all the rules for
specified schema id.
@ -628,13 +640,61 @@ class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
for row in rules['rows']:
status, data = self._fetch_properties(row['oid'])
if status:
if ignore_keys:
for key in self.keys_to_ignore:
if key in data:
del data[key]
res[row['name']] = data
return res
def ddl_compare(self, **kwargs):
"""
This function returns the DDL/DML statements based on the
comparison status.
:param kwargs:
:return:
"""
src_params = kwargs.get('source_params')
tgt_params = kwargs.get('target_params')
source = kwargs.get('source')
target = kwargs.get('target')
target_schema = kwargs.get('target_schema')
comp_status = kwargs.get('comp_status')
diff = ''
if comp_status == 'source_only':
diff = self.get_sql_from_diff(gid=src_params['gid'],
sid=src_params['sid'],
did=src_params['did'],
scid=src_params['scid'],
tid=src_params['tid'],
oid=source['oid'],
diff_schema=target_schema)
elif comp_status == 'target_only':
diff = self.get_sql_from_diff(gid=tgt_params['gid'],
sid=tgt_params['sid'],
did=tgt_params['did'],
scid=tgt_params['scid'],
tid=tgt_params['tid'],
oid=target['oid'],
drop_sql=True)
elif comp_status == 'different':
diff_dict = directory_diff(
source, target,
ignore_keys=self.keys_to_ignore, difference={}
)
diff_dict.update(parce_acl(source, target))
diff = self.get_sql_from_diff(gid=tgt_params['gid'],
sid=tgt_params['sid'],
did=tgt_params['did'],
scid=tgt_params['scid'],
tid=tgt_params['tid'],
oid=target['oid'],
source_schema=source['schema'],
diff_schema=target_schema,
data=diff_dict)
return diff
SchemaDiffRegistry(blueprint.node_type, RuleView, 'table')
RuleView.register_node_view(blueprint)
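A minimal sketch of the schema substitution added to the rule DDL above: a rule's action statements may embed the source schema name, which is textually replaced with the target schema before the CREATE RULE is emitted (a simplification of the actual string handling; names are made up):

def retarget_rule_statements(rule_data, source_schema, target_schema):
    """Point a rule's action statements at the target schema."""
    if 'statements' in rule_data and rule_data['statements']:
        rule_data['statements'] = rule_data['statements'].replace(
            source_schema, target_schema)
    return rule_data


rule = {'statements': 'INSERT INTO source_sch.audit_log VALUES (new.*);'}
print(retarget_rule_statements(rule, 'source_sch', 'target_sch')['statements'])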

View File

@ -11,365 +11,96 @@
import copy
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils.ajax import internal_server_error
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.model import SchemaDiffModel
are_dictionaries_identical
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
class SchemaDiffTableCompare(SchemaDiffObjectCompare):
table_keys_to_ignore = ['oid', 'schema', 'edit_types', 'attnum',
'col_type', 'references', 'reltuples', 'oid-2',
'rows_cnt', 'seqrelid', 'atttypid', 'elemoid',
'hastoasttable', 'relhassubclass']
keys_to_ignore = ['oid', 'schema', 'vacuum_table',
'vacuum_toast', 'edit_types', 'attnum', 'col_type',
'references', 'reltuples', 'rows_cnt']
constraint_keys_to_ignore = ['relname', 'nspname', 'parent_tbl',
'attrelid', 'adrelid', 'fknsp', 'confrelid',
'references', 'refnsp', 'remote_schema']
keys_to_ignore_ddl_comp = ['oid',
'schema',
'columns',
'edit_types',
'primary_key',
'unique_constraint',
'exclude_constraint',
'check_constraint',
'foreign_key',
'reltuples',
'rows_cnt'
]
trigger_keys_to_ignore = ['xmin', 'tgrelid', 'tgfoid', 'tfunction',
'tgqual', 'tgconstraint']
index_keys_to_ignore = ['relowner', 'indrelid']
keys_to_remove = {
'columns': ['relname', 'nspname', 'parent_tbl', 'attrelid', 'adrelid'],
'primary_key': ['oid'],
'unique_constraint': ['oid'],
'check_constraint': ['oid', 'nspname'],
'foreign_key': ['oid', 'fknsp', 'confrelid'],
'exclude_constraint': ['oid'],
'partitions': ['oid'],
}
keys_to_remove_ddl_comp = {
'columns': ['relname', 'nspname', 'parent_tbl', 'attrelid', 'adrelid'],
'check_constraint': ['nspname'],
'foreign_key': ['fknsp', 'confrelid']
}
keys_to_ignore = table_keys_to_ignore + constraint_keys_to_ignore \
+ trigger_keys_to_ignore + index_keys_to_ignore
def compare(self, **kwargs):
"""
This function is used to compare all the table objects
from two different schemas.
:return: Comparison Dictionary
:param kwargs:
:return:
"""
src_sid = kwargs.get('source_sid')
src_did = kwargs.get('source_did')
src_scid = kwargs.get('source_scid')
tar_sid = kwargs.get('target_sid')
tar_did = kwargs.get('target_did')
tar_scid = kwargs.get('target_scid')
sub_modules = ['index', 'rule', 'trigger']
source_params = {'sid': kwargs.get('source_sid'),
'did': kwargs.get('source_did'),
'scid': kwargs.get('source_scid')}
target_params = {'sid': kwargs.get('target_sid'),
'did': kwargs.get('target_did'),
'scid': kwargs.get('target_scid')}
source_tables = self.fetch_tables(sid=src_sid, did=src_did,
scid=src_scid)
status, target_schema = self.get_schema(**target_params)
if not status:
return internal_server_error(errormsg=target_schema)
target_tables = self.fetch_tables(sid=tar_sid, did=tar_did,
scid=tar_scid)
if self.manager.version >= 120000:
sub_modules.append('compound_trigger')
source_tables = self.fetch_tables(**source_params)
target_tables = self.fetch_tables(**target_params)
# If both the dict have no items then return None.
if not (source_tables or target_tables) or (
len(source_tables) <= 0 and len(target_tables) <= 0):
return None
src_server_type, tar_server_type = self.get_server_type(src_sid,
tar_sid)
for module in sub_modules:
module_view = SchemaDiffRegistry.get_node_view(
module)
# Get sub module data for source tables
if module_view.blueprint.server_type is None or \
src_server_type in module_view.blueprint.server_type:
for key, val in source_tables.items():
source = module_view.fetch_objects_to_compare(
sid=src_sid,
did=src_did,
scid=src_scid,
tid=val['oid'],
oid=None,
ignore_keys=True
)
source_tables[key][module] = source
# Get sub module data for target tables
if module_view.blueprint.server_type is None or \
tar_server_type in module_view.blueprint.server_type:
for key, val in target_tables.items():
target = module_view.fetch_objects_to_compare(
sid=tar_sid,
did=tar_did,
scid=tar_scid,
tid=val['oid'],
oid=None,
ignore_keys=True
)
target_tables[key][module] = target
return compare_dictionaries(source_tables, target_tables,
return compare_dictionaries(self, source_params, target_params,
target_schema, source_tables,
target_tables,
self.node_type,
self.blueprint.COLLECTION_LABEL,
self.keys_to_ignore)
@staticmethod
def get_server_type(src_id, tar_id):
"""Get server types of source and target servers."""
driver = get_driver(PG_DEFAULT_DRIVER)
src_manager = driver.connection_manager(src_id)
tar_manager = driver.connection_manager(tar_id)
return src_manager.server_type, tar_manager.server_type
def ddl_compare(self, **kwargs):
"""
This function will compare properties of 2 tables and
return the source DDL, target DDL and Difference of them.
:param kwargs:
:return:
"""
source_params = {'sid': kwargs.get('source_sid'),
'did': kwargs.get('source_did'),
'scid': kwargs.get('source_scid'),
'tid': kwargs.get('source_oid'),
'json_resp': False
}
src_sid = kwargs.get('source_sid')
src_did = kwargs.get('source_did')
src_scid = kwargs.get('source_scid')
src_oid = kwargs.get('source_oid')
tar_sid = kwargs.get('target_sid')
tar_did = kwargs.get('target_did')
tar_scid = kwargs.get('target_scid')
tar_oid = kwargs.get('target_oid')
comp_status = kwargs.get('comp_status')
generate_script = False
target_params = {'sid': kwargs.get('target_sid'),
'did': kwargs.get('target_did'),
'scid': kwargs.get('target_scid'),
'tid': kwargs.get('target_oid'),
'json_resp': False
}
if 'generate_script' in kwargs and kwargs['generate_script']:
generate_script = True
source = ''
target = ''
diff = ''
ignore_sub_modules = ['column', 'constraints']
src_server_type, tar_server_type = self.get_server_type(src_sid,
tar_sid)
status, target_schema = self.get_schema(tar_sid,
tar_did,
tar_scid
)
if not status:
return internal_server_error(errormsg=target_schema)
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
if not generate_script:
source = self.get_sql_from_table_diff(sid=src_sid,
did=src_did,
scid=src_scid,
tid=src_oid,
json_resp=False)
diff = self.get_sql_from_table_diff(sid=src_sid, did=src_did,
scid=src_scid, tid=src_oid,
diff_schema=target_schema,
json_resp=False)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
if not generate_script:
target = self.get_sql_from_table_diff(sid=tar_sid,
did=tar_did,
scid=tar_scid,
tid=tar_oid,
json_resp=False)
diff = self.get_drop_sql(sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_oid)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['different']:
source = self.fetch_tables(
sid=src_sid, did=src_did,
scid=src_scid, tid=src_oid,
keys_to_remove=self.keys_to_remove_ddl_comp
)
target = self.fetch_tables(
sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_oid,
keys_to_remove=self.keys_to_remove_ddl_comp
)
if self.manager.version < 100000:
ignore_sub_modules.append('partition')
if self.manager.version < 120000:
ignore_sub_modules.append('compound_trigger')
# In case of error return None
if not (source or target):
return None
diff_dict = directory_diff(
source, target, ignore_keys=self.keys_to_ignore_ddl_comp,
difference={}
)
# Column comparison
col_diff = self.table_col_ddl_comp(source, target)
diff_dict.update(col_diff)
# Constraint comparison
pk_diff = self.constraint_ddl_comp(source, target, diff_dict)
diff_dict.update(pk_diff)
diff_dict.update(self.parce_acl(source, target))
if not generate_script:
source = self.get_sql_from_table_diff(sid=src_sid,
did=src_did,
scid=src_scid,
tid=src_oid,
json_resp=False)
target = self.get_sql_from_table_diff(sid=tar_sid,
did=tar_did,
scid=tar_scid,
tid=tar_oid,
json_resp=False)
diff = self.get_sql_from_table_diff(sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_oid,
diff_data=diff_dict,
json_resp=False)
for module in self.blueprint.submodules:
if module.NODE_TYPE not in ignore_sub_modules:
module_view = SchemaDiffRegistry.get_node_view(
module.NODE_TYPE)
if module_view.blueprint.server_type and (
src_server_type not in
module_view.blueprint.server_type and
tar_server_type not in
module_view.blueprint.server_type
):
continue
if module_view.blueprint.server_type and (
(src_server_type in
module_view.blueprint.server_type and
tar_server_type not in
module_view.blueprint.server_type) or (
src_server_type not in
module_view.blueprint.server_type and
tar_server_type in
module_view.blueprint.server_type)
):
continue
result = module_view.compare(
source_sid=src_sid, source_did=src_did,
source_scid=src_scid, source_tid=src_oid,
target_sid=tar_sid, target_did=tar_did,
target_scid=tar_scid, target_tid=tar_oid
)
if result and module.NODE_TYPE != 'partition':
child_diff = ''
for res in result:
if res['status'] == \
SchemaDiffModel.COMPARISON_STATUS[
'different']:
source_oid = res['source_oid']
target_oid = res['target_oid']
else:
source_oid = res['oid']
target_oid = res['oid']
if res['status'] != \
SchemaDiffModel.COMPARISON_STATUS[
'identical']:
child_diff = module_view.ddl_compare(
source_sid=src_sid, source_did=src_did,
source_scid=src_scid,
source_oid=source_oid,
source_tid=src_oid, target_sid=tar_sid,
target_did=tar_did, target_scid=tar_scid,
target_tid=tar_oid, target_oid=target_oid,
comp_status=res['status']
)
if child_diff:
diff += '\n' + child_diff
elif result:
# For partition module
identical = False
source_only = False
target_only = False
different = False
for res in result:
if res['status'] == \
SchemaDiffModel.COMPARISON_STATUS[
'identical']:
identical = True
elif res['status'] == \
SchemaDiffModel.COMPARISON_STATUS[
'source_only']:
source_only = True
elif res['status'] == \
SchemaDiffModel.COMPARISON_STATUS[
'target_only']:
target_only = True
else:
different = True
if identical:
pass
elif (source_only or target_only) and not different:
for res in result:
source_oid = res['oid']
target_oid = res['oid']
child_diff = module_view.ddl_compare(
source_sid=src_sid, source_did=src_did,
source_scid=src_scid,
source_oid=source_oid,
source_tid=src_oid, target_sid=tar_sid,
target_did=tar_did, target_scid=tar_scid,
target_tid=tar_oid, target_oid=target_oid,
comp_status=res['status']
)
if child_diff:
diff += child_diff
else:
diff = self.get_sql_from_table_diff(
sid=src_sid,
did=src_did,
scid=src_scid,
tid=src_oid,
diff_schema=target_schema,
json_resp=False,
schema_diff_table=True
)
else:
source = self.get_sql_from_table_diff(sid=src_sid, did=src_did,
scid=src_scid, tid=src_oid,
json_resp=False)
target = self.get_sql_from_table_diff(sid=tar_sid, did=tar_did,
scid=tar_scid, tid=tar_oid,
json_resp=False)
source = self.get_sql_from_table_diff(**source_params)
target = self.get_sql_from_table_diff(**target_params)
return {'source_ddl': source,
'target_ddl': target,
'diff_ddl': diff
'diff_ddl': ''
}
@staticmethod
def table_col_ddl_comp(source, target):
def table_col_comp(source, target):
"""
Table Column comparison
:param source: Source columns
@ -413,17 +144,14 @@ class SchemaDiffTableCompare(SchemaDiffObjectCompare):
return different
@staticmethod
def constraint_ddl_comp(source_table, target_table, diff_dict):
def table_constraint_comp(source_table, target_table):
"""
Table Constraint DDL comparison
:param source: Source Table
:param target: Target Table
:param source_table: Source Table
:param target_table: Target Table
:return: Difference of constraints
"""
different = {}
non_editable_keys = {}
columns_to_be_dropped = []
non_editable_keys = {'primary_key': ['col_count',
'condeferrable',
'condeffered',
@ -456,14 +184,16 @@ class SchemaDiffTableCompare(SchemaDiffObjectCompare):
if type(target_cols) is list and len(
target_cols) > 0:
tmp_src = copy.deepcopy(source)
tmp_src.pop('oid')
if 'oid' in tmp_src:
tmp_src.pop('oid')
tmp_tar = None
tmp = None
for item in target_cols:
if item['name'] == source['name']:
tmp_tar = copy.deepcopy(item)
tmp = copy.deepcopy(item)
tmp_tar.pop('oid')
if 'oid' in tmp_tar:
tmp_tar.pop('oid')
if tmp_tar and tmp_src != tmp_tar:
tmp_updated = copy.deepcopy(source)
for key in non_editable_keys[constraint]:
@ -474,7 +204,8 @@ class SchemaDiffTableCompare(SchemaDiffObjectCompare):
tmp_updated = None
break
if tmp_updated:
tmp_updated['oid'] = tmp['oid']
if 'oid' in tmp:
tmp_updated['oid'] = tmp['oid']
updated.append(tmp_updated)
target_cols.remove(tmp)
elif tmp_tar and tmp_src == tmp_tar:
@ -492,18 +223,109 @@ class SchemaDiffTableCompare(SchemaDiffObjectCompare):
return different
def remove_keys_for_comparision(self, data, keys=None):
def get_sql_from_submodule_diff(self, source_params, target_params,
target_schema, source, target, diff_dict):
"""
This function is used to remove specific keys from data
This function returns the DDL/DML statements of the
submodules of table based on the comparison status.
:param source_params:
:param target_params:
:param target_schema:
:param source:
:param target:
:param diff_dict:
:return:
"""
# Get the difference result for source and target columns
col_diff = self.table_col_comp(source, target)
diff_dict.update(col_diff)
keys_to_remove = keys if keys else self.keys_to_remove
# Get the difference result for source and target constraints
pk_diff = self.table_constraint_comp(source, target)
diff_dict.update(pk_diff)
for p_key, p_val in keys_to_remove.items():
if p_key in data and data[p_key] is not None \
and len(data[p_key]) > 0:
for item in data[p_key]:
# Remove keys that should not be the part of comparision.
for key in p_val:
if key in item:
item.pop(key)
# Get the difference DDL/DML statements for table
target_params['diff_data'] = diff_dict
diff = self.get_sql_from_table_diff(**target_params)
ignore_sub_modules = ['column', 'constraints']
if self.manager.version < 100000:
ignore_sub_modules.append('partition')
if self.manager.server_type == 'pg' or self.manager.version < 120000:
ignore_sub_modules.append('compound_trigger')
# Iterate through all the sub modules of the table
for module in self.blueprint.submodules:
if module.NODE_TYPE not in ignore_sub_modules:
module_view = \
SchemaDiffRegistry.get_node_view(module.NODE_TYPE)
if module.NODE_TYPE == 'partition' and \
('is_partitioned' in source and source['is_partitioned'])\
and ('is_partitioned' in target and
target['is_partitioned']):
target_ddl = module_view.ddl_compare(
target_params=target_params,
parent_source_data=source,
parent_target_data=target
)
diff += '\n' + target_ddl
elif module.NODE_TYPE != 'partition':
dict1 = copy.deepcopy(source[module.NODE_TYPE])
dict2 = copy.deepcopy(target[module.NODE_TYPE])
# Find the duplicate keys in both the dictionaries
dict1_keys = set(dict1.keys())
dict2_keys = set(dict2.keys())
intersect_keys = dict1_keys.intersection(dict2_keys)
# Keys that are available in source and missing in target.
added = dict1_keys - dict2_keys
for item in added:
source_ddl = module_view.ddl_compare(
source_params=source_params,
target_params=target_params,
source=dict1[item],
target=None,
target_schema=target_schema,
comp_status='source_only'
)
diff += '\n' + source_ddl
# Keys that are available in target and missing in source.
removed = dict2_keys - dict1_keys
for item in removed:
target_ddl = module_view.ddl_compare(
source_params=source_params,
target_params=target_params,
source=None,
target=dict2[item],
target_schema=target_schema,
comp_status='target_only'
)
diff += '\n' + target_ddl
# Keys that are available in both source and target.
for key in intersect_keys:
# Recursively Compare the two dictionary
if not are_dictionaries_identical(
dict1[key], dict2[key], self.keys_to_ignore):
diff_ddl = module_view.ddl_compare(
source_params=source_params,
target_params=target_params,
source=dict1[key],
target=dict2[key],
target_schema=target_schema,
comp_status='different',
parent_source_data=source,
parent_target_data=target
)
diff += '\n' + diff_ddl
return diff
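The key-set bookkeeping in get_sql_from_submodule_diff above reduces to a standard three-way split of object names: present only in the source (create), only in the target (drop), or in both (compare further). A standalone sketch with made-up dictionary contents:

def split_by_presence(source, target):
    """Classify object names as added (source only), removed (target only) or common."""
    src_keys, tgt_keys = set(source), set(target)
    return src_keys - tgt_keys, tgt_keys - src_keys, src_keys & tgt_keys


added, removed, common = split_by_presence(
    {'idx_a': {}, 'idx_b': {'definition': 1}},
    {'idx_b': {'definition': 2}, 'idx_c': {}})
print(added, removed, common)   # e.g. {'idx_a'} {'idx_c'} {'idx_b'}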

View File

@ -9,7 +9,7 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{% endif %}
{### Alter column type and collation ###}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname)%}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname) or data.col_type_conversion is defined %}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %}
-- WARNING:
-- The SQL statement below would normally be used to alter the datatype for the {{o_data.name}} column, however,
@ -101,7 +101,7 @@ COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, o_data.name)}}
{% endif %}
{### Update column variables ###}
{% if 'attoptions' in data and data.attoptions and data.attoptions|length > 0 %}
{% if 'attoptions' in data and data.attoptions != None and data.attoptions|length > 0 %}
{% set variables = data.attoptions %}
{% if 'deleted' in variables and variables.deleted|length > 0 %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}

View File

@ -6,17 +6,18 @@
{% if data.name and data.name != o_data.name %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
RENAME {{conn|qtIdent(o_data.name)}} TO {{conn|qtIdent(data.name)}};
{% endif %}
{### Alter column type and collation ###}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname) or data.col_type_conversion is defined %}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %}
-- WARNING:
-- The SQL statement below would normally be used to alter the datatype for the {{o_data.name}} column, however,
-- the current datatype cannot be cast to the target datatype so this conversion cannot be made automatically.
{% endif %}
{### Alter column type and collation ###}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname)%}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %}ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %} -- {% endif %} ALTER COLUMN {% if data.name %}{{conn|qtTypeIdent(data.name)}}{% else %}{{conn|qtTypeIdent(o_data.name)}}{% endif %} TYPE {{ GET_TYPE.UPDATE_TYPE_SQL(conn, data, o_data) }}{% if data.collspcname and data.collspcname != o_data.collspcname %}
COLLATE {{data.collspcname}}{% elif o_data.collspcname %} COLLATE {{o_data.collspcname}}{% endif %};
{% endif %}
{### Alter column default value ###}

View File

@ -9,7 +9,7 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{% endif %}
{### Alter column type and collation ###}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname)%}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen is defined and data.attlen != o_data.attlen) or (data.attprecision is defined and data.attprecision != o_data.attprecision) or (data.collspcname and data.collspcname != o_data.collspcname) or data.col_type_conversion is defined %}
{% if data.col_type_conversion is defined and data.col_type_conversion == False %}
-- WARNING:
-- The SQL statement below would normally be used to alter the datatype for the XXX column, however,

View File

@ -0,0 +1,22 @@
CREATE TABLE {{conn|qtIdent(data.schema, data.name)}} (
LIKE {{conn|qtIdent(data.schema, data.orig_name)}} INCLUDING ALL
) PARTITION BY {{ data.partition_scheme }};
{{partition_sql}}
INSERT INTO {{conn|qtIdent(data.schema, data.name)}}(
{% if data.columns and data.columns|length > 0 %}
{% for c in data.columns %} {{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %})
SELECT {% if data.columns and data.columns|length > 0 %}{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %}
FROM {{conn|qtIdent(data.schema, data.orig_name)}};
{% if partition_data.partitions and partition_data.partitions|length > 0 %}
{% for part in partition_data.partitions %}
DROP TABLE IF EXISTS {{conn|qtIdent(data.schema, part.partition_name)}};
ALTER TABLE {{conn|qtIdent(data.schema, part.temp_partition_name)}}
RENAME TO {{conn|qtIdent(part.partition_name)}};
{% endfor %}{% endif %}
DROP TABLE {{conn|qtIdent(data.schema, data.orig_name)}};
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
RENAME TO {{data.orig_name}};
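The template above (and its variants that follow) converts an existing table into a partitioned one: create a new table LIKE the original, build its partitions under temporary names, copy the rows across, rename the temporary partitions into place, then drop the original table and rename the new one to take its place. A minimal sketch of how such a Jinja2 template renders, using a simplified template and hypothetical names (public.sales, RANGE (sale_date)) rather than the actual pgAdmin template:
from jinja2 import Template
# Simplified stand-in for the convert-to-partitioned template above; the
# real template takes the full column list and the partition data as well.
tmpl = Template(
    "CREATE TABLE {{ schema }}.{{ name }} (\n"
    "    LIKE {{ schema }}.{{ orig_name }} INCLUDING ALL\n"
    ") PARTITION BY {{ partition_scheme }};\n"
    "INSERT INTO {{ schema }}.{{ name }} SELECT * FROM {{ schema }}.{{ orig_name }};\n"
    "DROP TABLE {{ schema }}.{{ orig_name }};\n"
    "ALTER TABLE {{ schema }}.{{ name }} RENAME TO {{ orig_name }};"
)
print(tmpl.render(schema='public', name='sales_new', orig_name='sales',
                  partition_scheme='RANGE (sale_date)'))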

View File

@ -0,0 +1,24 @@
CREATE TABLE {{conn|qtIdent(data.schema, data.name)}} (
LIKE {{conn|qtIdent(data.schema, data.orig_name)}} INCLUDING ALL
) PARTITION BY {{ data.partition_scheme }};
{{partition_sql}}{{partition_data.default_partition_header}}
CREATE TABLE IF NOT EXISTS {{conn|qtIdent(data.schema, data.default_partition_name)}} PARTITION OF {{conn|qtIdent(data.schema, data.name)}} DEFAULT;
INSERT INTO {{conn|qtIdent(data.schema, data.name)}}(
{% if data.columns and data.columns|length > 0 %}
{% for c in data.columns %} {{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %})
SELECT {% if data.columns and data.columns|length > 0 %}{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %}
FROM {{conn|qtIdent(data.schema, data.orig_name)}};
{% if partition_data.partitions and partition_data.partitions|length > 0 %}
{% for part in partition_data.partitions %}
DROP TABLE IF EXISTS {{conn|qtIdent(data.schema, part.partition_name)}};
ALTER TABLE {{conn|qtIdent(data.schema, part.temp_partition_name)}}
RENAME TO {{conn|qtIdent(part.partition_name)}};
{% endfor %}{% endif %}
DROP TABLE {{conn|qtIdent(data.schema, data.orig_name)}};
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
RENAME TO {{data.orig_name}};

View File

@ -0,0 +1,22 @@
CREATE TABLE {{conn|qtIdent(data.schema, data.name)}} (
LIKE {{conn|qtIdent(data.schema, data.orig_name)}} INCLUDING ALL
) PARTITION BY {{ data.partition_scheme }};
{{partition_sql}}
INSERT INTO {{conn|qtIdent(data.schema, data.name)}}(
{% if data.columns and data.columns|length > 0 %}
{% for c in data.columns %} {{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %})
SELECT {% if data.columns and data.columns|length > 0 %}{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %}
FROM {{conn|qtIdent(data.schema, data.orig_name)}};
{% if partition_data.partitions and partition_data.partitions|length > 0 %}
{% for part in partition_data.partitions %}
DROP TABLE IF EXISTS {{conn|qtIdent(data.schema, part.partition_name)}};
ALTER TABLE {{conn|qtIdent(data.schema, part.temp_partition_name)}}
RENAME TO {{conn|qtIdent(part.partition_name)}};
{% endfor %}{% endif %}
DROP TABLE {{conn|qtIdent(data.schema, data.orig_name)}};
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
RENAME TO {{data.orig_name}};

View File

@ -0,0 +1,24 @@
CREATE TABLE {{conn|qtIdent(data.schema, data.name)}} (
LIKE {{conn|qtIdent(data.schema, data.orig_name)}} INCLUDING ALL
) PARTITION BY {{ data.partition_scheme }};
{{partition_sql}}{{partition_data.default_partition_header}}
CREATE TABLE IF NOT EXISTS {{conn|qtIdent(data.schema, data.default_partition_name)}} PARTITION OF {{conn|qtIdent(data.schema, data.name)}} DEFAULT;
INSERT INTO {{conn|qtIdent(data.schema, data.name)}}(
{% if data.columns and data.columns|length > 0 %}
{% for c in data.columns %} {{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %})
SELECT {% if data.columns and data.columns|length > 0 %}{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %}
FROM {{conn|qtIdent(data.schema, data.orig_name)}};
{% if partition_data.partitions and partition_data.partitions|length > 0 %}
{% for part in partition_data.partitions %}
DROP TABLE IF EXISTS {{conn|qtIdent(data.schema, part.partition_name)}};
ALTER TABLE {{conn|qtIdent(data.schema, part.temp_partition_name)}}
RENAME TO {{conn|qtIdent(part.partition_name)}};
{% endfor %}{% endif %}
DROP TABLE {{conn|qtIdent(data.schema, data.orig_name)}};
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
RENAME TO {{data.orig_name}};

View File

@ -1,14 +0,0 @@
INSERT INTO {{conn|qtIdent(data.schema, data.name)}}(
{% if data.columns and data.columns|length > 0 %}
{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %})
SELECT {% if data.columns and data.columns|length > 0 %}{% for c in data.columns %}{{c.name}}{% if not loop.last %},{% endif %}{% endfor %}{% endif %}
FROM {{conn|qtIdent(data.schema, data.orig_name)}};
DROP TABLE {{conn|qtIdent(data.schema, data.orig_name)}};
{{partition_sql}}
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
RENAME TO {{conn|qtIdent(data.orig_name)}};

View File

@ -56,7 +56,7 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
{% if data.fillfactor and data.fillfactor != o_data.fillfactor %}
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
SET (FILLFACTOR={{data.fillfactor}});
{% elif data.fillfactor == '' and data.fillfactor != o_data.fillfactor %}
{% elif (data.fillfactor == '' or data.fillfactor == None) and data.fillfactor != o_data.fillfactor %}
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
RESET (FILLFACTOR);

View File

@ -64,7 +64,7 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
{% if data.fillfactor and data.fillfactor != o_data.fillfactor %}
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
SET (FILLFACTOR={{data.fillfactor}});
{% elif data.fillfactor == '' and data.fillfactor != o_data.fillfactor %}
{% elif (data.fillfactor == '' or data.fillfactor == None) and data.fillfactor != o_data.fillfactor %}
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
RESET (FILLFACTOR);

View File

@ -3,9 +3,13 @@ ALTER TRIGGER {{ conn|qtIdent(o_data.name) }} ON {{ conn|qtIdent(o_data.nspname,
RENAME TO {{ conn|qtIdent(data.name) }};
{% endif %}
{% if ((data.prosrc is defined or data.is_row_trigger is defined or data.evnt_insert is defined or data.evnt_delete is defined or data.evnt_update is defined or data.fires is defined) and o_data.lanname == 'edbspl' and (o_data.prosrc != data.prosrc or data.is_row_trigger != o_data.is_row_trigger or data.evnt_insert != o_data.evnt_insert or data.evnt_delete != o_data.evnt_delete or data.evnt_update != o_data.evnt_update or o_data.fires != data.fires)) %}
{% if ((data.prosrc is defined or data.is_row_trigger is defined or data.evnt_insert is defined or data.evnt_delete is defined or data.evnt_update is defined or data.fires is defined or data.is_constraint_trigger is defined) and (o_data.prosrc != data.prosrc or data.is_row_trigger != o_data.is_row_trigger or data.evnt_insert != o_data.evnt_insert or data.evnt_delete != o_data.evnt_delete or data.evnt_update != o_data.evnt_update or o_data.fires != data.fires or data.is_constraint_trigger != o_data.is_constraint_trigger)) %}
{% set or_flag = False %}
{% if data.lanname == 'edbspl' or data.tfunction == 'Inline EDB-SPL' %}
CREATE OR REPLACE TRIGGER {{ conn|qtIdent(data.name) }}
{% else %}
CREATE{% if data.is_constraint_trigger %} CONSTRAINT{% endif %} TRIGGER {{ conn|qtIdent(data.name) }}
{% endif %}
{% if data.fires is defined %}{{data.fires}} {% else %}{{o_data.fires}} {% endif %}{% if data.evnt_insert is not defined %}{% if o_data.evnt_insert %}INSERT{% set or_flag = True %}
{% endif %}{% else %}{% if data.evnt_insert %}INSERT{% set or_flag = True %}{% endif %}{% endif %}{% if data.evnt_delete is not defined %}{% if o_data.evnt_delete %}
{% if or_flag %} OR {% endif %}DELETE{% set or_flag = True %}
@ -16,23 +20,29 @@ CREATE OR REPLACE TRIGGER {{ conn|qtIdent(data.name) }}
{% if or_flag %} OR {% endif %}TRUNCATE{% set or_flag = True %}{%endif %}{% endif %}{% if data.evnt_update is not defined %}{% if o_data.evnt_update %}
{% if or_flag %} OR {% endif %}UPDATE {% if o_data.columns|length > 0 %}OF {% for c in o_data.columns %}{% if loop.index != 1 %}, {% endif %}{{ conn|qtIdent(c) }}{% endfor %}{% endif %}
{% endif %}{% else %}{% if data.evnt_update %}
{% if or_flag %} OR {% endif %}UPDATE {% if o_data.columns|length > 0 %}OF {% for c in o_data.columns %}{% if loop.index != 1 %}, {% endif %}{{ conn|qtIdent(c) }}{% endfor %}{% endif %}{% endif %}
{% if or_flag %} OR {% endif %}UPDATE {% if data.columns|length > 0 %}OF {% for c in data.columns %}{% if loop.index != 1 %}, {% endif %}{{ conn|qtIdent(c) }}{% endfor %}{% endif %}{% endif %}
{% endif %}
ON {{ conn|qtIdent(data.schema, data.table) }}
{% if o_data.tgdeferrable %}
{% if data.tgdeferrable %}
DEFERRABLE{% if data.tginitdeferred %} INITIALLY DEFERRED{% endif %}
{% elif o_data.tgdeferrable %}
DEFERRABLE{% if o_data.tginitdeferred %} INITIALLY DEFERRED{% endif %}
{% endif %}{% if data.is_row_trigger is not defined %}
FOR EACH{% if o_data.is_row_trigger %} ROW{% else %} STATEMENT{% endif %} {% else %}
FOR EACH{% if data.is_row_trigger %} ROW{% else %} STATEMENT{% endif %} {% endif %}
{% if o_data.whenclause %}
{% if data.whenclause %}
WHEN {{ data.whenclause }}
{% elif o_data.whenclause %}
WHEN {{ o_data.whenclause }}
{% endif %}
{% if (data.prosrc is not defined) %}
{{ o_data.prosrc }};
{% if (data.tfunction is defined) %}
EXECUTE PROCEDURE {{ data.tfunction }}{% if data.tgargs %}({{ data.tgargs }}){% else %}(){% endif%};
{% else %}
{{ data.prosrc }};
EXECUTE PROCEDURE {{ o_data.tfunction }}{% if o_data.tgargs %}({{ o_data.tgargs }}){% else %}(){% endif%};
{% endif %}
{% if data.description is not defined and o_data.description %}

View File

@ -3,9 +3,13 @@ ALTER TRIGGER {{ conn|qtIdent(o_data.name) }} ON {{ conn|qtIdent(o_data.nspname,
RENAME TO {{ conn|qtIdent(data.name) }};
{% endif %}
{% if ((data.prosrc is defined or data.is_row_trigger is defined or data.evnt_insert is defined or data.evnt_delete is defined or data.evnt_update is defined or data.fires is defined) and o_data.lanname == 'edbspl' and (o_data.prosrc != data.prosrc or data.is_row_trigger != o_data.is_row_trigger or data.evnt_insert != o_data.evnt_insert or data.evnt_delete != o_data.evnt_delete or data.evnt_update != o_data.evnt_update or o_data.fires != data.fires)) %}
{% if ((data.prosrc is defined or data.is_row_trigger is defined or data.evnt_insert is defined or data.evnt_delete is defined or data.evnt_update is defined or data.fires is defined or data.is_constraint_trigger is defined) and (o_data.prosrc != data.prosrc or data.is_row_trigger != o_data.is_row_trigger or data.evnt_insert != o_data.evnt_insert or data.evnt_delete != o_data.evnt_delete or data.evnt_update != o_data.evnt_update or o_data.fires != data.fires or data.is_constraint_trigger != o_data.is_constraint_trigger)) %}
{% set or_flag = False %}
{% if data.lanname == 'edbspl' or data.tfunction == 'Inline EDB-SPL' %}
CREATE OR REPLACE TRIGGER {{ conn|qtIdent(data.name) }}
{% else %}
CREATE{% if data.is_constraint_trigger %} CONSTRAINT{% endif %} TRIGGER {{ conn|qtIdent(data.name) }}
{% endif %}
{% if data.fires is defined %}{{data.fires}} {% else %}{{o_data.fires}} {% endif %}{% if data.evnt_insert is not defined %}{% if o_data.evnt_insert %}INSERT{% set or_flag = True %}
{% endif %}{% else %}{% if data.evnt_insert %}INSERT{% set or_flag = True %}{% endif %}{% endif %}{% if data.evnt_delete is not defined %}{% if o_data.evnt_delete %}
{% if or_flag %} OR {% endif %}DELETE{% set or_flag = True %}
@ -16,24 +20,39 @@ CREATE OR REPLACE TRIGGER {{ conn|qtIdent(data.name) }}
{% if or_flag %} OR {% endif %}TRUNCATE{% set or_flag = True %}{%endif %}{% endif %}{% if data.evnt_update is not defined %}{% if o_data.evnt_update %}
{% if or_flag %} OR {% endif %}UPDATE {% if o_data.columns|length > 0 %}OF {% for c in o_data.columns %}{% if loop.index != 1 %}, {% endif %}{{ conn|qtIdent(c) }}{% endfor %}{% endif %}
{% endif %}{% else %}{% if data.evnt_update %}
{% if or_flag %} OR {% endif %}UPDATE {% if o_data.columns|length > 0 %}OF {% for c in o_data.columns %}{% if loop.index != 1 %}, {% endif %}{{ conn|qtIdent(c) }}{% endfor %}{% endif %}{% endif %}
{% if or_flag %} OR {% endif %}UPDATE {% if data.columns|length > 0 %}OF {% for c in data.columns %}{% if loop.index != 1 %}, {% endif %}{{ conn|qtIdent(c) }}{% endfor %}{% endif %}{% endif %}
{% endif %}
ON {{ conn|qtIdent(data.schema, data.table) }}
{% if o_data.tgdeferrable %}
{% if data.tgdeferrable %}
DEFERRABLE{% if data.tginitdeferred %} INITIALLY DEFERRED{% endif %}
{% elif o_data.tgdeferrable %}
DEFERRABLE{% if o_data.tginitdeferred %} INITIALLY DEFERRED{% endif %}
{% endif %}{% if data.is_row_trigger is not defined %}
FOR EACH{% if o_data.is_row_trigger %} ROW{% else %} STATEMENT{% endif %} {% else %}
FOR EACH{% if data.is_row_trigger %} ROW{% else %} STATEMENT{% endif %} {% endif %}
{% if o_data.whenclause %}
{% if data.whenclause %}
WHEN {{ data.whenclause }}
{% elif o_data.whenclause %}
WHEN {{ o_data.whenclause }}
{% endif %}
{%if data.tfunction == 'Inline EDB-SPL' %}
{% if (data.prosrc is not defined) %}
{{ o_data.prosrc }};
{% else %}
{{ data.prosrc }};
{% endif %}
{% else %}
{% if (data.tfunction is defined) %}
EXECUTE PROCEDURE {{ data.tfunction }}{% if data.tgargs %}({{ data.tgargs }}){% else %}(){% endif%};
{% else %}
EXECUTE PROCEDURE {{ o_data.tfunction }}{% if o_data.tgargs %}({{ o_data.tgargs }}){% else %}(){% endif%};
{% endif %}
{% endif %}
{% if data.description is not defined and o_data.description %}
COMMENT ON TRIGGER {{ conn|qtIdent(data.name) }} ON {{ conn|qtIdent(o_data.nspname, o_data.relname) }}

View File

@ -29,6 +29,8 @@ from pgadmin.utils.compile_template_name import compile_template_path
from pgadmin.utils import IS_PY2
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
from pgadmin.tools.schema_diff.directory_compare import directory_diff,\
parce_acl
# If we are in Python3
if not IS_PY2:
@ -794,7 +796,7 @@ class TriggerView(PGChildNodeView, SchemaDiffObjectCompare):
SQL, name = trigger_utils.get_sql(
self.conn, data, tid, oid,
self.datlastsysoid,
self.blueprint.show_system_objects)
self.blueprint.show_system_objects, True)
if not isinstance(SQL, (str, unicode)):
return SQL
@ -932,8 +934,7 @@ class TriggerView(PGChildNodeView, SchemaDiffObjectCompare):
)
@check_precondition
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None,
ignore_keys=False):
def fetch_objects_to_compare(self, sid, did, scid, tid, oid=None):
"""
This function will fetch the list of all the triggers for
the specified schema id.
@ -963,14 +964,59 @@ class TriggerView(PGChildNodeView, SchemaDiffObjectCompare):
for row in triggers['rows']:
status, data = self._fetch_properties(tid, row['oid'])
if status:
if ignore_keys:
for key in self.keys_to_ignore:
if key in data:
del data[key]
res[row['name']] = data
return res
def ddl_compare(self, **kwargs):
"""
This function returns the DDL/DML statements based on the
comparison status.
:param kwargs: Source/target connection parameters, the source and target objects, the target schema and the comparison status
:return: DDL statements for the difference
"""
src_params = kwargs.get('source_params')
tgt_params = kwargs.get('target_params')
source = kwargs.get('source')
target = kwargs.get('target')
target_schema = kwargs.get('target_schema')
comp_status = kwargs.get('comp_status')
diff = ''
if comp_status == 'source_only':
diff = self.get_sql_from_diff(gid=src_params['gid'],
sid=src_params['sid'],
did=src_params['did'],
scid=src_params['scid'],
tid=src_params['tid'],
oid=source['oid'],
diff_schema=target_schema)
elif comp_status == 'target_only':
diff = self.get_sql_from_diff(gid=tgt_params['gid'],
sid=tgt_params['sid'],
did=tgt_params['did'],
scid=tgt_params['scid'],
tid=tgt_params['tid'],
oid=target['oid'],
drop_sql=True)
elif comp_status == 'different':
diff_dict = directory_diff(
source, target,
ignore_keys=self.keys_to_ignore, difference={}
)
diff_dict.update(parce_acl(source, target))
diff = self.get_sql_from_diff(gid=tgt_params['gid'],
sid=tgt_params['sid'],
did=tgt_params['did'],
scid=tgt_params['scid'],
tid=tgt_params['tid'],
oid=target['oid'],
data=diff_dict)
return diff
SchemaDiffRegistry(blueprint.node_type, TriggerView, 'table')
TriggerView.register_node_view(blueprint)
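For orientation, the ddl_compare hook added above boils down to a dispatch on the comparison status: a source-only trigger gets a CREATE scripted against the target schema, a target-only trigger gets a DROP, and a differing trigger gets SQL built from the directory diff. A standalone sketch of that dispatch, with placeholder SQL strings standing in for what get_sql_from_diff would actually produce:
# Standalone sketch of the comp_status dispatch used by ddl_compare above.
# The SQL strings are placeholders, not real generated statements.
def sketch_ddl_compare(comp_status, create_sql, drop_sql, alter_sql):
    if comp_status == 'source_only':
        return create_sql   # re-create the object in the target schema
    if comp_status == 'target_only':
        return drop_sql     # drop the object from the target
    if comp_status == 'different':
        return alter_sql    # alter or drop/re-create in the target
    return ''               # identical objects need no script
print(sketch_ddl_compare('target_only',
                         'CREATE TRIGGER ...;',
                         'DROP TRIGGER ...;',
                         'ALTER TRIGGER ...;'))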

View File

@ -141,7 +141,7 @@ def get_trigger_function_and_columns(conn, data, tid,
@get_template_path
def get_sql(conn, data, tid, trid, datlastsysoid,
show_system_objects, template_path=None):
show_system_objects, is_schema_diff=False, template_path=None):
"""
This function will generate sql from model data.
@ -151,6 +151,7 @@ def get_sql(conn, data, tid, trid, datlastsysoid,
:param trid: Trigger ID
:param datlastsysoid: Last system OID of the database
:param show_system_objects: Show System Object value True or False
:param is_schema_diff: True when the SQL is generated for the schema diff tool
:param template_path: Optional template path
:return:
"""
@ -173,6 +174,35 @@ def get_sql(conn, data, tid, trid, datlastsysoid,
if 'name' not in data:
name = data['name'] = old_data['name']
drop_sql = ''
if is_schema_diff:
if 'table' not in data:
data['table'] = old_data['relname']
if 'schema' not in data:
data['schema'] = old_data['nspname']
# If any of the below keys is present in data then we need to drop
# the trigger and re-create it.
key_array = ['prosrc', 'is_row_trigger', 'evnt_insert',
'evnt_delete', 'evnt_update', 'fires', 'tgdeferrable',
'whenclause', 'tfunction', 'tgargs', 'columns',
'is_constraint_trigger', 'tginitdeferred']
is_drop_trigger = False
for key in key_array:
if key in data:
is_drop_trigger = True
break
if is_drop_trigger:
tmp_data = dict()
tmp_data['name'] = data['name']
tmp_data['nspname'] = old_data['nspname']
tmp_data['relname'] = old_data['relname']
drop_sql = render_template("/".join([template_path,
'delete.sql']),
data=tmp_data, conn=conn)
old_data = get_trigger_function_and_columns(
conn, old_data, tid, show_system_objects)
@ -182,6 +212,9 @@ def get_sql(conn, data, tid, trid, datlastsysoid,
"/".join([template_path, 'update.sql']),
data=data, o_data=old_data, conn=conn
)
if is_schema_diff:
SQL = drop_sql + '\n' + SQL
else:
required_args = {
'name': 'Name',

View File

@ -588,6 +588,10 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
if not json_resp:
display_comments = False
res_data = parse_rule_definition(res)
# Update the table name for rules so the correct table is referenced
if 'view' in res_data:
res_data['view'] = table
rules_sql += render_template("/".join(
[self.rules_template_path, 'create.sql']),
data=res_data, display_comments=display_comments)
@ -1298,11 +1302,12 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
status=200
)
def get_partitions_sql(self, partitions):
def get_partitions_sql(self, partitions, schema_diff=False):
"""
This function will iterate over all the partitions and create the SQL.
:param partitions: List of partitions
:param schema_diff: If True, generate the SQL using the temporary partition names required by the schema diff tool.
"""
sql = ''
@ -1370,12 +1375,17 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
data=part_data, conn=self.conn
)
else:
# For schema diff we create temporary partitions to copy the
# data from the original table to the temporary table.
if schema_diff:
part_data['name'] = row['temp_partition_name']
partition_sql = render_template(
"/".join([self.partition_template_path, 'create.sql']),
data=part_data, conn=self.conn
)
sql += partition_sql + '\n'
sql += partition_sql
return sql

View File

@ -30,9 +30,9 @@ CREATE MATERIALIZED VIEW {{ conn|qtIdent(view_schema, view_name) }}
{% if data.fillfactor or o_data.fillfactor %}
WITH(
{% if data.fillfactor %}
FILLFACTOR = {{ data.fillfactor }}{% if (data['vacuum_data'] is defined and data['vacuum_data']['changed']|length > 0) or (o_data['vacuum_data'] is defined and o_data['vacuum_data']['changed']|length > 0) %},{% endif %}
FILLFACTOR = {{ data.fillfactor }}{% if (data['vacuum_data'] is defined and data['vacuum_data']['changed']|length > 0) %},{% endif %}
{% elif o_data.fillfactor %}
FILLFACTOR = {{ o_data.fillfactor }}{% if (data['vacuum_data'] is defined and data['vacuum_data']['changed']|length > 0) or (o_data['vacuum_data'] is defined and o_data['vacuum_data']['changed']|length > 0) %},{% endif %}
FILLFACTOR = {{ o_data.fillfactor }}{% if (data['vacuum_data'] is defined and data['vacuum_data']['changed']|length > 0) %},{% endif %}
{% endif %}
{% if data['vacuum_data']['changed']|length > 0 %}

View File

@ -30,12 +30,13 @@ CREATE MATERIALIZED VIEW {{ conn|qtIdent(view_schema, view_name) }}
{% if data.fillfactor or o_data.fillfactor %}
WITH(
{% if data.fillfactor %}
FILLFACTOR = {{ data.fillfactor }}{% if (data['vacuum_data'] is defined and data['vacuum_data']['changed']|length > 0) or (o_data['vacuum_data'] is defined and o_data['vacuum_data']['changed']|length > 0) %},{% endif %}
FILLFACTOR = {{ data.fillfactor }}{% if (data['vacuum_data'] is defined and data['vacuum_data']['changed']|length > 0) %},{% endif %}
{% elif o_data.fillfactor %}
FILLFACTOR = {{ o_data.fillfactor }}{% if (data['vacuum_data'] is defined and data['vacuum_data']['changed']|length > 0) or (o_data['vacuum_data'] is defined and o_data['vacuum_data']['changed']|length > 0) %},{% endif %}
FILLFACTOR = {{ o_data.fillfactor }}{% if (data['vacuum_data'] is defined and data['vacuum_data']['changed']|length > 0) %},{% endif %}
{% endif %}
{% if data['vacuum_data']['changed']|length > 0 %}
{% for field in data['vacuum_data']['changed'] %} {{ field.name }} = {{ field.value|lower }}{% if not loop.last %},{% endif %}{{ '\n' }}
{% for field in data['vacuum_data']['changed'] %} {{ field.name }} = {{ field.value|lower }}{% if not loop.last %},{{ '\n' }}{% endif %}
{% endfor %}
{% endif %}
)
@ -44,7 +45,6 @@ FILLFACTOR = {{ o_data.fillfactor }}{% if (data['vacuum_data'] is defined and da
{{ def }}
{% if data.with_data is defined %}
WITH {{ 'DATA' if data.with_data else 'NO DATA' }};
{% elif o_data.with_data is defined %}
WITH {{ 'DATA' if o_data.with_data else 'NO DATA' }};
@ -128,8 +128,7 @@ ALTER MATERIALIZED VIEW {{ conn|qtIdent(view_schema, view_name) }} RESET(
{% if('vacuum_table' in data and data['vacuum_table']['changed']|length > 0) %}
ALTER MATERIALIZED VIEW {{ conn|qtIdent(data.schema, data.name) }} SET(
{% for field in data['vacuum_table']['changed'] %}
{% if field.value != None %} {{ field.name }} = {{ field.value|lower }}{% if not loop.last %},{% endif %}
{% endif %}
{% if field.value != None %} {{ field.name }} = {{ field.value|lower }}{% if not loop.last %},{% endif %}{% endif %}
{% endfor %}
);

View File

@ -19,7 +19,9 @@ ALTER TABLE {{ conn|qtIdent(view_schema, view_name) }}
{% endif %}
{% if def and def != o_data.definition.rstrip(';') %}
CREATE OR REPLACE VIEW {{ conn|qtIdent(view_schema, view_name) }}
{% if ((data.check_option and data.check_option.lower() != 'no') or data.security_barrier) %}
WITH ({% if (data.check_option or o_data.check_option) %}check_option={{ data.check_option if data.check_option else o_data.check_option }}{{', ' }}{% endif %}security_barrier={{ data.security_barrier|lower if data.security_barrier is defined else o_data.security_barrier|default('false', 'true')|lower }})
{% endif %}
AS
{{ def }};
{% else %}

View File

@ -19,7 +19,9 @@ ALTER TABLE {{ conn|qtIdent(view_schema, view_name) }}
{% endif %}
{% if def and def != o_data.definition.rstrip(';') %}
CREATE OR REPLACE VIEW {{ conn|qtIdent(view_schema, view_name) }}
{% if ((data.check_option and data.check_option.lower() != 'no') or data.security_barrier) %}
WITH ({% if (data.check_option or o_data.check_option) %}check_option={{ data.check_option if data.check_option else o_data.check_option }}{{', ' }}{% endif %}security_barrier={{ data.security_barrier|lower if data.security_barrier else o_data.security_barrier|default('false', 'true')|lower }})
{% endif %}
AS
{{ def }};
{% else %}

View File

@ -1 +1,5 @@
ALTER VIEW public."testview_$%{}[]()&*^!@""'`\/#"
SET (security_barrier=true);
ALTER VIEW public."testview_$%{}[]()&*^!@""'`\/#"
SET (check_option=cascaded);
GRANT SELECT ON TABLE public."testview_$%{}[]()&*^!@""'`\/#" TO PUBLIC;

View File

@ -3,10 +3,7 @@
-- DROP VIEW public."testview_$%{}[]()&*^!@""'`\/#";
CREATE OR REPLACE VIEW public."testview_$%{}[]()&*^!@""'`\/#"
WITH (
check_option=cascaded,
security_barrier=true
) AS
AS
SELECT test_view_table.col1
FROM test_view_table;

View File

@ -1,4 +1,3 @@
CREATE OR REPLACE VIEW public."testview_$%{}[]()&*^!@""'`\/#"
WITH (check_option=cascaded, security_barrier=true)
AS
SELECT * FROM test_view_table;

View File

@ -1 +1,5 @@
ALTER VIEW public."testview_$%{}[]()&*^!@""'`\/#"
SET (security_barrier=true);
ALTER VIEW public."testview_$%{}[]()&*^!@""'`\/#"
SET (check_option=cascaded);
GRANT SELECT ON TABLE public."testview_$%{}[]()&*^!@""'`\/#" TO PUBLIC;

View File

@ -3,10 +3,7 @@
-- DROP VIEW public."testview_$%{}[]()&*^!@""'`\/#";
CREATE OR REPLACE VIEW public."testview_$%{}[]()&*^!@""'`\/#"
WITH (
check_option=cascaded,
security_barrier=true
) AS
AS
SELECT test_view_table.col1
FROM test_view_table;

View File

@ -1,4 +1,3 @@
CREATE OR REPLACE VIEW public."testview_$%{}[]()&*^!@""'`\/#"
WITH (check_option=cascaded, security_barrier=true)
AS
SELECT * FROM test_view_table;

View File

@ -68,7 +68,6 @@ class SchemaDiffModule(PgAdminModule):
'schema_diff.connect_server',
'schema_diff.connect_database',
'schema_diff.get_server',
'schema_diff.generate_script',
'schema_diff.close'
]
@ -452,7 +451,7 @@ def compare(trans_id, source_sid, source_did, source_scid,
for node_name, node_view in all_registered_nodes.items():
view = SchemaDiffRegistry.get_node_view(node_name)
if hasattr(view, 'compare'):
msg = "Comparing " + view.blueprint.COLLECTION_LABEL + " ..."
msg = "Comparing " + view.blueprint.COLLECTION_LABEL
diff_model_obj.set_comparison_info(msg, total_percent)
# Update the message and total percentage in session object
update_session_diff_transaction(trans_id, session_obj,
@ -510,59 +509,6 @@ def poll(trans_id):
'diff_percentage': diff_percentage})
@blueprint.route(
'/generate_script/<int:trans_id>/',
methods=["POST"],
endpoint="generate_script"
)
def generate_script(trans_id):
"""This function will generate the scripts for the selected objects."""
data = request.form if request.form else json.loads(
request.data, encoding='utf-8'
)
status, error_msg, diff_model_obj, session_obj = \
check_transaction_status(trans_id)
if error_msg == gettext('Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, status=404)
source_sid = int(data['source_sid'])
source_did = int(data['source_did'])
source_scid = int(data['source_scid'])
target_sid = int(data['target_sid'])
target_did = int(data['target_did'])
target_scid = int(data['target_scid'])
diff_ddl = ''
for d in data['sel_rows']:
node_type = d['node_type']
source_oid = int(d['source_oid'])
target_oid = int(d['target_oid'])
comp_status = d['comp_status']
view = SchemaDiffRegistry.get_node_view(node_type)
if view and hasattr(view, 'ddl_compare') and \
comp_status != SchemaDiffModel.COMPARISON_STATUS['identical']:
sql = view.ddl_compare(source_sid=source_sid,
source_did=source_did,
source_scid=source_scid,
target_sid=target_sid,
target_did=target_did,
target_scid=target_scid,
source_oid=source_oid,
target_oid=target_oid,
comp_status=comp_status,
generate_script=True)
diff_ddl += sql['diff_ddl'] + '\n\n'
return ajax_response(
status=200,
response={'diff_ddl': diff_ddl}
)
@blueprint.route(
'/ddl_compare/<int:trans_id>/<int:source_sid>/<int:source_did>/'
'<int:source_scid>/<int:target_sid>/<int:target_did>/<int:target_scid>/'
@ -620,7 +566,11 @@ def check_version_compatibility(sid, tid):
tar_server = Server.query.filter_by(id=tid).first()
tar_manager = driver.connection_manager(tar_server.id)
tar_conn = tar_manager.connection()
if src_manager.server_type != tar_manager.server_type:
return False, gettext('Schema diff does not support the comparison '
'between Postgres Server and EDB Postgres '
'Advanced Server.')
if not (src_conn.connected() and tar_conn.connected()):
return False, gettext('Server(s) disconnected.')

View File

@ -15,8 +15,7 @@ from flask import render_template
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils.ajax import internal_server_error
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries,\
directory_diff
from pgadmin.tools.schema_diff.directory_compare import compare_dictionaries
from pgadmin.tools.schema_diff.model import SchemaDiffModel
@ -68,10 +67,12 @@ class SchemaDiffObjectCompare:
'scid': kwargs.get('target_scid')
}
if 'source_tid' in kwargs:
source_params['tid'] = kwargs['source_tid']
if 'target_tid' in kwargs:
target_params['tid'] = kwargs['target_tid']
status, target_schema = self.get_schema(kwargs.get('target_sid'),
kwargs.get('target_did'),
kwargs.get('target_scid')
)
if not status:
return internal_server_error(errormsg=target_schema)
source = self.fetch_objects_to_compare(**source_params)
@ -82,7 +83,8 @@ class SchemaDiffObjectCompare:
len(source) <= 0 and len(target) <= 0):
return None
return compare_dictionaries(source, target,
return compare_dictionaries(self, source_params, target_params,
target_schema, source, target,
self.node_type,
self.blueprint.COLLECTION_LABEL,
self.keys_to_ignore)
@ -93,13 +95,6 @@ class SchemaDiffObjectCompare:
return the difference of SQL
"""
source = ''
target = ''
diff = ''
comp_status = kwargs.get('comp_status')
only_diff = False
generate_script = False
source_params = {'gid': 1,
'sid': kwargs.get('source_sid'),
'did': kwargs.get('source_did'),
@ -114,98 +109,10 @@ class SchemaDiffObjectCompare:
'oid': kwargs.get('target_oid')
}
if 'source_tid' in kwargs:
source_params['tid'] = kwargs['source_tid']
only_diff = True
if 'target_tid' in kwargs:
target_params['tid'] = kwargs['target_tid']
only_diff = True
if 'generate_script' in kwargs and kwargs['generate_script']:
generate_script = True
source_params_adv = copy.deepcopy(source_params)
target_params_adv = copy.deepcopy(target_params)
del source_params_adv['gid']
del target_params_adv['gid']
status, target_schema = self.get_schema(kwargs.get('target_sid'),
kwargs.get('target_did'),
kwargs.get('target_scid')
)
if not status:
return internal_server_error(errormsg=target_schema)
if comp_status == SchemaDiffModel.COMPARISON_STATUS['source_only']:
if not generate_script:
source = self.get_sql_from_diff(**source_params)
source_params.update({
'diff_schema': target_schema
})
diff = self.get_sql_from_diff(**source_params)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['target_only']:
if not generate_script:
target = self.get_sql_from_diff(**target_params)
target_params.update(
{'drop_sql': True})
diff = self.get_sql_from_diff(**target_params)
elif comp_status == SchemaDiffModel.COMPARISON_STATUS['different']:
source = self.fetch_objects_to_compare(**source_params_adv)
target = self.fetch_objects_to_compare(**target_params_adv)
if not (source or target):
return None
diff_dict = directory_diff(source,
target,
ignore_keys=self.keys_to_ignore,
difference={}
)
diff_dict.update(self.parce_acl(source, target))
if not generate_script:
source = self.get_sql_from_diff(**source_params)
target = self.get_sql_from_diff(**target_params)
target_params.update(
{'data': diff_dict})
diff = self.get_sql_from_diff(**target_params)
else:
source = self.get_sql_from_diff(**source_params)
target = self.get_sql_from_diff(**target_params)
if only_diff:
return diff
source = self.get_sql_from_diff(**source_params)
target = self.get_sql_from_diff(**target_params)
return {'source_ddl': source,
'target_ddl': target,
'diff_ddl': diff
'diff_ddl': ''
}
@staticmethod
def parce_acl(source, target):
key = 'acl'
if 'datacl' in source:
key = 'datacl'
elif 'relacl' in source:
key = 'relacl'
tmp_source = source[key] if\
key in source and source[key] is not None else []
tmp_target = copy.deepcopy(target[key]) if\
key in target and target[key] is not None else []
diff = {'added': [], 'deleted': []}
for acl in tmp_source:
if acl in tmp_target:
tmp_target.remove(acl)
elif acl not in tmp_target:
diff['added'].append(acl)
diff['deleted'] = tmp_target
return {key: diff}

View File

@ -15,11 +15,17 @@ from pgadmin.tools.schema_diff.model import SchemaDiffModel
count = 1
def compare_dictionaries(source_dict, target_dict, node, node_label,
def compare_dictionaries(view_object, source_params, target_params,
target_schema, source_dict, target_dict, node,
node_label,
ignore_keys=None):
"""
This function will compare the two dictionaries.
:param view_object: View Object
:param source_params: Source Parameters
:param target_params: Target Parameters
:param target_schema: Target Schema Name
:param source_dict: Source Dictionary
:param target_dict: Target Dictionary
:param node: node type
@ -36,18 +42,43 @@ def compare_dictionaries(source_dict, target_dict, node, node_label,
dict2_keys = set(dict2.keys())
intersect_keys = dict1_keys.intersection(dict2_keys)
# Add gid to the params
source_params['gid'] = target_params['gid'] = 1
# Keys that are available in source and missing in target.
source_only = []
added = dict1_keys - dict2_keys
global count
for item in added:
if node == 'table':
temp_src_params = copy.deepcopy(source_params)
temp_src_params['tid'] = source_dict[item]['oid']
temp_src_params['json_resp'] = False
source_ddl = \
view_object.get_sql_from_table_diff(**temp_src_params)
temp_src_params.update({
'diff_schema': target_schema
})
diff_ddl = view_object.get_sql_from_table_diff(**temp_src_params)
else:
temp_src_params = copy.deepcopy(source_params)
temp_src_params['oid'] = source_dict[item]['oid']
source_ddl = view_object.get_sql_from_diff(**temp_src_params)
temp_src_params.update({
'diff_schema': target_schema
})
diff_ddl = view_object.get_sql_from_diff(**temp_src_params)
source_only.append({
'id': count,
'type': node,
'label': node_label,
'title': item,
'oid': source_dict[item]['oid'],
'status': SchemaDiffModel.COMPARISON_STATUS['source_only']
'status': SchemaDiffModel.COMPARISON_STATUS['source_only'],
'source_ddl': source_ddl,
'target_ddl': '',
'diff_ddl': diff_ddl
})
count += 1
@ -55,13 +86,34 @@ def compare_dictionaries(source_dict, target_dict, node, node_label,
# Keys that are available in target and missing in source.
removed = dict2_keys - dict1_keys
for item in removed:
if node == 'table':
temp_tgt_params = copy.deepcopy(target_params)
temp_tgt_params['tid'] = target_dict[item]['oid']
temp_tgt_params['json_resp'] = False
target_ddl = view_object.get_sql_from_table_diff(**temp_tgt_params)
if 'gid' in temp_tgt_params:
del temp_tgt_params['gid']
if 'json_resp' in temp_tgt_params:
del temp_tgt_params['json_resp']
diff_ddl = view_object.get_drop_sql(**temp_tgt_params)
else:
temp_tgt_params = copy.deepcopy(target_params)
temp_tgt_params['oid'] = target_dict[item]['oid']
target_ddl = view_object.get_sql_from_diff(**temp_tgt_params)
temp_tgt_params.update(
{'drop_sql': True})
diff_ddl = view_object.get_sql_from_diff(**temp_tgt_params)
target_only.append({
'id': count,
'type': node,
'label': node_label,
'title': item,
'oid': target_dict[item]['oid'],
'status': SchemaDiffModel.COMPARISON_STATUS['target_only']
'status': SchemaDiffModel.COMPARISON_STATUS['target_only'],
'source_ddl': '',
'target_ddl': target_ddl,
'diff_ddl': diff_ddl
})
count += 1
@ -69,13 +121,6 @@ def compare_dictionaries(source_dict, target_dict, node, node_label,
identical = []
different = []
for key in intersect_keys:
# ignore the keys if available.
for ig_key in ignore_keys:
if ig_key in dict1[key]:
dict1[key].pop(ig_key)
if ig_key in dict2[key]:
dict2[key].pop(ig_key)
# Recursively compare the two dictionaries
if are_dictionaries_identical(dict1[key], dict2[key], ignore_keys):
identical.append({
@ -89,6 +134,50 @@ def compare_dictionaries(source_dict, target_dict, node, node_label,
'status': SchemaDiffModel.COMPARISON_STATUS['identical']
})
else:
if node == 'table':
temp_src_params = copy.deepcopy(source_params)
temp_tgt_params = copy.deepcopy(target_params)
# Add submodules to the ignore keys so that the directory
# difference won't include them in added, deleted and changed
sub_module = ['index', 'rule', 'trigger', 'compound_trigger']
temp_ignore_keys = view_object.keys_to_ignore + sub_module
diff_dict = directory_diff(
dict1[key], dict2[key],
ignore_keys=temp_ignore_keys,
difference={}
)
diff_dict.update(parce_acl(dict1[key], dict2[key]))
temp_src_params['tid'] = source_dict[key]['oid']
temp_tgt_params['tid'] = target_dict[key]['oid']
temp_src_params['json_resp'] = \
temp_tgt_params['json_resp'] = False
source_ddl = \
view_object.get_sql_from_table_diff(**temp_src_params)
target_ddl = \
view_object.get_sql_from_table_diff(**temp_tgt_params)
diff_ddl = view_object.get_sql_from_submodule_diff(
temp_src_params, temp_tgt_params, target_schema,
dict1[key], dict2[key], diff_dict)
else:
temp_src_params = copy.deepcopy(source_params)
temp_tgt_params = copy.deepcopy(target_params)
diff_dict = directory_diff(
dict1[key], dict2[key],
ignore_keys=view_object.keys_to_ignore, difference={}
)
diff_dict.update(parce_acl(dict1[key], dict2[key]))
temp_src_params['oid'] = source_dict[key]['oid']
temp_tgt_params['oid'] = target_dict[key]['oid']
source_ddl = view_object.get_sql_from_diff(**temp_src_params)
target_ddl = view_object.get_sql_from_diff(**temp_tgt_params)
temp_tgt_params.update(
{'data': diff_dict})
diff_ddl = view_object.get_sql_from_diff(**temp_tgt_params)
different.append({
'id': count,
'type': node,
@ -97,7 +186,10 @@ def compare_dictionaries(source_dict, target_dict, node, node_label,
'oid': source_dict[key]['oid'],
'source_oid': source_dict[key]['oid'],
'target_oid': target_dict[key]['oid'],
'status': SchemaDiffModel.COMPARISON_STATUS['different']
'status': SchemaDiffModel.COMPARISON_STATUS['different'],
'source_ddl': source_ddl,
'target_ddl': target_ddl,
'diff_ddl': diff_ddl
})
count += 1
@ -143,13 +235,6 @@ def are_dictionaries_identical(source_dict, target_dict, ignore_keys):
src_keys = set(source_dict.keys())
tar_keys = set(target_dict.keys())
# ignore the keys if available.
for ig_key in ignore_keys:
if ig_key in src_keys:
source_dict.pop(ig_key)
if ig_key in target_dict:
target_dict.pop(ig_key)
# Keys that are available in source and missing in target.
src_only = src_keys - tar_keys
# Keys that are available in target and missing in source.
@ -167,6 +252,10 @@ def are_dictionaries_identical(source_dict, target_dict, ignore_keys):
return False
for key in source_dict.keys():
# Continue if key is available in ignore_keys
if key in ignore_keys:
continue
if type(source_dict[key]) is dict:
if not are_dictionaries_identical(source_dict[key],
target_dict[key], ignore_keys):
@ -235,33 +324,32 @@ def directory_diff(source_dict, target_dict, ignore_keys=[], difference={}):
# TODO
pass
elif type(source) is dict:
if 'name' in source or 'colname' in source:
if type(target_dict[key]) is list and len(
target_dict[key]) > 0:
tmp = None
tmp_target = copy.deepcopy(target_dict[key])
for item in tmp_target:
if (
'name' in item and
item['name'] == source['name']
) or (
'colname' in item and
item['colname'] == source[
'colname']
):
tmp = copy.deepcopy(item)
if tmp and source != tmp:
updated.append(copy.deepcopy(source))
tmp_target.remove(tmp)
elif tmp and source == tmp:
tmp_target.remove(tmp)
elif tmp is None:
tmp_key_array = ['name', 'colname', 'argid']
for tmp_key in tmp_key_array:
if tmp_key in source:
if type(target_dict[key]) is list and \
len(target_dict[key]) > 0:
tmp = None
tmp_target = \
copy.deepcopy(target_dict[key])
for item in tmp_target:
if tmp_key in item and \
item[tmp_key] == \
source[tmp_key]:
tmp = copy.deepcopy(item)
if tmp and source != tmp:
updated.append(copy.deepcopy(source))
tmp_target.remove(tmp)
elif tmp and source == tmp:
tmp_target.remove(tmp)
elif tmp is None:
added.append(source)
else:
added.append(source)
else:
added.append(source)
difference[key] = {}
difference[key]['added'] = added
difference[key]['changed'] = updated
difference[key] = {}
difference[key]['added'] = added
difference[key]['changed'] = updated
elif target_dict[key] is None or \
(type(target_dict[key]) is list and
len(target_dict[key]) < index and
@ -271,7 +359,7 @@ def directory_diff(source_dict, target_dict, ignore_keys=[], difference={}):
len(target_dict[key]) > index:
difference[key] = source
else:
target_dict[key] = source_dict[key]
difference[key] = source_dict[key]
if type(source) is dict and tmp_target and key in tmp_target and \
tmp_target[key] and len(tmp_target[key]) > 0:
@ -286,6 +374,34 @@ def directory_diff(source_dict, target_dict, ignore_keys=[], difference={}):
else:
if source_dict[key] != target_dict[key]:
difference[key] = source_dict[key]
if (key == 'comment' or key == 'description') and \
source_dict[key] is None:
difference[key] = ''
else:
difference[key] = source_dict[key]
return difference
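directory_diff walks two property dictionaries and reports, key by key, what the source adds or changes relative to the target; list values are matched entry by entry on the 'name', 'colname' or 'argid' key. A hedged usage sketch, assuming pgAdmin's package is importable and using illustrative dictionaries rather than real fetched properties:
from pgadmin.tools.schema_diff.directory_compare import directory_diff
source = {'fires': 'BEFORE', 'columns': [{'name': 'id'}, {'name': 'price'}]}
target = {'fires': 'AFTER', 'columns': [{'name': 'id'}]}
# 'fires' differs, so it is reported with the source value; the 'price'
# column exists only in the source, so it shows up in the 'added' list
# under 'columns'.
diff = directory_diff(source, target, ignore_keys=['oid'], difference={})
print(diff)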
def parce_acl(source, target):
key = 'acl'
if 'datacl' in source:
key = 'datacl'
elif 'relacl' in source:
key = 'relacl'
tmp_source = source[key] if\
key in source and source[key] is not None else []
tmp_target = copy.deepcopy(target[key]) if\
key in target and target[key] is not None else []
diff = {'added': [], 'deleted': []}
for acl in tmp_source:
if acl in tmp_target:
tmp_target.remove(acl)
elif acl not in tmp_target:
diff['added'].append(acl)
diff['deleted'] = tmp_target
return {key: diff}
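parce_acl pairs up the ACL entries of the two objects: grants present only on the source come back under 'added', grants present only on the target under 'deleted', keyed by whichever ACL column the object uses (acl, datacl or relacl). A hedged usage sketch with made-up grant dictionaries:
from pgadmin.tools.schema_diff.directory_compare import parce_acl
# Hypothetical ACLs: PUBLIC has a grant only on the source, alice only on
# the target, so the diff should add the former and delete the latter.
source = {'relacl': [{'grantee': 'PUBLIC', 'privileges': ['r']}]}
target = {'relacl': [{'grantee': 'alice', 'privileges': ['w']}]}
print(parce_acl(source, target))
# -> {'relacl': {'added': [{'grantee': 'PUBLIC', ...}],
#                'deleted': [{'grantee': 'alice', ...}]}}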

View File

@ -63,6 +63,8 @@ let SchemaDiffSqlControl =
},
render: function() {
let obj = Backform.SqlFieldControl.prototype.render.apply(this, arguments);
obj.sqlCtrl.setOption('readOnly', true);
if(this.$el.find('.ddl-copy')) this.$el.find('.ddl-copy').on('click', this.copyData);
return obj;
},

View File

@ -197,7 +197,6 @@ export default class SchemaDiffUI {
baseServerUrl = url_for('schema_diff.get_server', {'sid': self.selection['target_sid'],
'did': self.selection['target_did']}),
sel_rows = self.grid ? self.grid.getSelectedRows() : [],
sel_rows_data = [],
url_params = self.selection,
generated_script = undefined,
open_query_tool,
@ -261,45 +260,16 @@ export default class SchemaDiffUI {
};
if (sel_rows.length > 0) {
let script_body = '';
for (var row = 0; row < sel_rows.length; row++) {
let data = self.grid.getData().getItem(sel_rows[row]);
if (data.type) {
let tmp_data = {
'node_type': data.type,
'source_oid': parseInt(data.oid, 10),
'target_oid': parseInt(data.oid, 10),
'comp_status': data.status,
};
if(data.status && (data.status.toLowerCase() == 'different' || data.status.toLowerCase() == 'identical')) {
tmp_data['target_oid'] = data.target_oid;
}
sel_rows_data.push(tmp_data);
if(!_.isUndefined(data.diff_ddl)) {
script_body += data.diff_ddl + '\n\n';
}
}
url_params['sel_rows'] = sel_rows_data;
let baseUrl = url_for('schema_diff.generate_script', {'trans_id': self.trans_id});
$.ajax({
url: baseUrl,
method: 'POST',
dataType: 'json',
contentType: 'application/json',
data: JSON.stringify(url_params),
})
.done(function (res) {
if (res) {
generated_script = script_header + 'BEGIN;' + '\n' + res.diff_ddl + '\n' + 'END;';
}
open_query_tool();
})
.fail(function (xhr) {
self.raise_error_on_fail(gettext('Generate script error'), xhr);
$('#diff_fetching_data').addClass('d-none');
});
generated_script = script_header + 'BEGIN;' + '\n' + script_body + 'END;';
open_query_tool();
} else if (!_.isUndefined(self.model.get('diff_ddl'))) {
open_query_tool();
}
@ -448,6 +418,7 @@ export default class SchemaDiffUI {
render_grid_data(data) {
var self = this;
self.grid.setSelectedRows([]);
data.sort((a, b) => (a.label > b.label) ? 1 : (a.label === b.label) ? ((a.title > b.title) ? 1 : -1) : -1);
self.dataView.beginUpdate();
self.dataView.setItems(data);
self.dataView.setFilter(self.filter.bind(self));
@ -489,8 +460,13 @@ export default class SchemaDiffUI {
contentType: 'application/json',
})
.done(function (res) {
let msg = res.data.compare_msg + res.data.diff_percentage + '% completed';
$('#diff_fetching_data').find('.schema-diff-busy-text').text(msg);
let msg = res.data.compare_msg;
if (res.data.diff_percentage != 100) {
msg = msg + ' (this may take a few minutes)...';
}
msg = msg + '<br>'+ res.data.diff_percentage + '% completed.';
$('#diff_fetching_data').find('.schema-diff-busy-text').html(msg);
})
.fail(function (xhr) {
self.raise_error_on_fail(gettext('Poll error'), xhr);
@ -528,37 +504,44 @@ export default class SchemaDiffUI {
'diff_ddl': undefined,
});
var url_params = self.selection;
if(data.status && (data.status.toLowerCase() == 'different' || data.status.toLowerCase() == 'identical')) {
if(data.status && data.status.toLowerCase() == 'identical') {
var url_params = self.selection;
target_oid = data.target_oid;
url_params['trans_id'] = self.trans_id;
url_params['source_oid'] = source_oid;
url_params['target_oid'] = target_oid;
url_params['comp_status'] = data.status;
url_params['node_type'] = node_type;
_.each(url_params, function(key, val) {
url_params[key] = parseInt(val, 10);
});
$('#ddl_comp_fetching_data').removeClass('d-none');
var baseUrl = url_for('schema_diff.ddl_compare', url_params);
self.model.url = baseUrl;
self.model.fetch({
success: function() {
self.footer.render();
$('#ddl_comp_fetching_data').addClass('d-none');
},
error: function() {
self.footer.render();
$('#ddl_comp_fetching_data').addClass('d-none');
},
});
} else {
self.model.set({
'source_ddl': data.source_ddl,
'target_ddl': data.target_ddl,
'diff_ddl': data.diff_ddl,
});
self.footer.render();
}
url_params['trans_id'] = self.trans_id;
url_params['source_oid'] = source_oid;
url_params['target_oid'] = target_oid;
url_params['comp_status'] = data.status;
url_params['node_type'] = node_type;
_.each(url_params, function(key, val) {
url_params[key] = parseInt(val, 10);
});
$('#ddl_comp_fetching_data').removeClass('d-none');
var baseUrl = url_for('schema_diff.ddl_compare', url_params);
self.model.url = baseUrl;
self.model.fetch({
success: function() {
self.footer.render();
$('#ddl_comp_fetching_data').addClass('d-none');
},
error: function() {
self.footer.render();
$('#ddl_comp_fetching_data').addClass('d-none');
},
});
}
render() {

View File

@ -142,16 +142,10 @@ class SchemaDiffTestCase(BaseTestGenerator):
file_obj = open(diff_file, 'a')
for diff in response_data['data']:
if diff['type'] in self.nodes:
src_obj_oid = tar_obj_oid = None
if diff['status'] == 'Source Only' or\
diff['status'] == 'Target Only':
src_obj_oid = tar_obj_oid = diff['oid']
elif diff['status'] == 'Different':
src_obj_oid = diff['source_oid']
tar_obj_oid = diff['target_oid']
if src_obj_oid is not None:
if diff['type'] in self.nodes and diff['status'] == 'Identical':
src_obj_oid = diff['source_oid']
tar_obj_oid = diff['target_oid']
if src_obj_oid is not None and tar_obj_oid is not None:
url = 'schema_diff/ddl_compare/{0}/{1}/{2}/{3}/{4}/{5}/' \
'{6}/{7}/{8}/{9}/{10}/'.format(self.trans_id,
self.server_id,