Fixed and improved API test cases for the schema diff tool. Fixes #5417

Akshay Joshi 2020-09-17 20:10:01 +05:30
parent 593d86d3f2
commit 55b5080c77
22 changed files with 5899 additions and 2571 deletions

View File

@@ -11,6 +11,7 @@ notes for it.
 .. toctree::
    :maxdepth: 1
+   release_notes_4_27
    release_notes_4_26
    release_notes_4_25
    release_notes_4_24

View File

@@ -0,0 +1,20 @@
+************
+Version 4.27
+************
+
+Release date: 2020-10-15
+
+This release contains a number of bug fixes and new features since the release of pgAdmin4 4.26.
+
+New features
+************
+
+Housekeeping
+************
+
+Bug fixes
+*********
+
+| `Issue #5417 <https://redmine.postgresql.org/issues/5417>`_ - Fixed and improved API test cases for the schema diff tool.

View File

@@ -660,7 +660,7 @@ class EventTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
             )
         else:
             sql = self._get_create_with_grant_sql(data)
-        return sql
+        return sql.strip('\n')

     def _get_create_with_grant_sql(self, data):

View File

@@ -486,7 +486,7 @@
         "mocking_required": false,
         "mock_data": {},
         "expected_data": {
-            "status_code": 410
+            "status_code": 500
         }
     }
 ],

View File

@@ -3,22 +3,26 @@
 {% if (data.fsrvtype is defined and data.fsrvtype != o_data.fsrvtype) or (data.fdwname is defined and data.fdwname != o_data.fdwname) %}
 {% set fsrvtype = o_data.fsrvtype %}
 {% set fdwname = o_data.fdwname %}
+{% set fsrvversion = o_data.fsrvversion %}
 {% if data.fsrvtype is defined %}
 {% set fsrvtype = data.fsrvtype %}
 {% endif %}
 {% if data.fdwname is defined %}
 {% set fdwname = data.fdwname %}
 {% endif %}
+{% if data.fsrvversion is defined %}
+{% set fsrvversion = data.fsrvversion %}
+{% endif %}
 -- WARNING:
 -- We have found the difference in SERVER TYPE OR FOREIGN DATA WRAPPER
 -- so we need to drop the existing foreign server first and re-create it.
 DROP SERVER {{ conn|qtIdent(o_data.name) }};
-CREATE SERVER {{ conn|qtIdent(o_data.name) }}{% if data.fsrvtype or o_data.fsrvtype %}
-TYPE {{ fsrvtype|qtLiteral }}{% endif %}{% if o_data.fsrvversion %}
-VERSION {{ o_data.fsrvversion|qtLiteral }}{%-endif %}{% if o_data.fdwname %}
+CREATE SERVER {{ conn|qtIdent(o_data.name) }}{% if fsrvtype %}
+TYPE {{ fsrvtype|qtLiteral }}{% endif %}{% if fsrvversion %}
+VERSION {{ fsrvversion|qtLiteral }}{%-endif %}{% if fdwname %}
 FOREIGN DATA WRAPPER {{ conn|qtIdent(fdwname) }}{% endif %}{% if o_data.fsrvoptions %}
@@ -40,7 +44,7 @@ ALTER SERVER {{ conn|qtIdent(data.name) }}
 {% endif %}
 {# ============= Update foreign server version ============= #}
-{% if data.fsrvversion is defined and data.fsrvversion != o_data.fsrvversion %}
+{% if data.fsrvversion is defined and data.fsrvversion is not none and data.fsrvversion != o_data.fsrvversion %}
 ALTER SERVER {{ conn|qtIdent(data.name) }}
     VERSION {{ data.fsrvversion|qtLiteral }};
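The reworked template above follows one precedence rule: for each of fsrvtype, fsrvversion and fdwname, use the incoming value from data when it is defined, otherwise fall back to the existing value in o_data, and emit the matching clause only when the resolved value is set. A minimal Python sketch of that rule, with made-up dictionaries and a hypothetical _resolve() helper (the qtIdent/qtLiteral quoting done by the real template is omitted):

def _resolve(data, o_data, key):
    # Prefer the incoming value when the caller supplied one, else keep the old one.
    return data[key] if key in data else o_data.get(key)

o_data = {'fsrvtype': 'old_type', 'fsrvversion': '1.0', 'fdwname': 'postgres_fdw'}
data = {'fsrvtype': 'new_type'}  # only the server type changed

clauses = []
for key, clause in (('fsrvtype', 'TYPE'),
                    ('fsrvversion', 'VERSION'),
                    ('fdwname', 'FOREIGN DATA WRAPPER')):
    value = _resolve(data, o_data, key)
    if value:  # a clause is emitted only when a value was resolved
        clauses.append("{0} '{1}'".format(clause, value))

print('DROP SERVER "srv";')
print('CREATE SERVER "srv"\n    ' + '\n    '.join(clauses) + ';')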

View File

@@ -70,7 +70,8 @@ SET {{ conn|qtIdent(variable.fdwoption) }} {{ variable.fdwvalue|qtLiteral }}{% e
 {% if data.fdwacl %}
 {% if 'deleted' in data.fdwacl %}
 {% for priv in data.fdwacl.deleted %}
-{{ PRIVILEGE.RESETALL(conn, 'FOREIGN DATA WRAPPER', priv.grantee, data.name) }} {% endfor %}
+{{ PRIVILEGE.RESETALL(conn, 'FOREIGN DATA WRAPPER', priv.grantee, data.name) }}
+{% endfor %}
 {% endif %}
 {% if 'changed' in data.fdwacl %}
 {% for priv in data.fdwacl.changed %}

View File

@@ -381,25 +381,12 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
         if not status:
             return False, internal_server_error(errormsg=result)

-        # if no acl found then by default add public
-        if res['rows'][0]['acl'] is None:
-            res['rows'][0]['lanacl'] = dict()
-            res['rows'][0]['lanacl']['grantee'] = 'PUBLIC'
-            res['rows'][0]['lanacl']['grantor'] = res['rows'][0]['lanowner']
-            res['rows'][0]['lanacl']['privileges'] = [
-                {
-                    'privilege_type': 'U',
-                    'privilege': True,
-                    'with_grant': False
-                }
-            ]
-        else:
-            for row in result['rows']:
-                priv = parse_priv_from_db(row)
-                if row['deftype'] in res['rows'][0]:
-                    res['rows'][0][row['deftype']].append(priv)
-                else:
-                    res['rows'][0][row['deftype']] = [priv]
+        for row in result['rows']:
+            priv = parse_priv_from_db(row)
+            if row['deftype'] in res['rows'][0]:
+                res['rows'][0][row['deftype']].append(priv)
+            else:
+                res['rows'][0][row['deftype']] = [priv]

         seclabels = []
         if 'seclabels' in res['rows'][0] and \
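The replacement loop groups each parsed privilege under its deftype key on the first result row instead of synthesising a default PUBLIC ACL. A rough sketch of the grouping idiom with stand-in data; parse_priv_from_db is mocked here, so the dict shapes are illustrative only:

def parse_priv(row):
    # Stand-in for pgAdmin's parse_priv_from_db(); the real return shape differs.
    return {'grantee': row['grantee'], 'privileges': row['privileges']}

first_row = {}  # plays the role of res['rows'][0]
acl_rows = [
    {'deftype': 'lanacl', 'grantee': 'PUBLIC', 'privileges': ['U']},
    {'deftype': 'lanacl', 'grantee': 'alice', 'privileges': ['U']},
]

for row in acl_rows:
    priv = parse_priv(row)
    if row['deftype'] in first_row:
        first_row[row['deftype']].append(priv)
    else:
        first_row[row['deftype']] = [priv]

# first_row['lanacl'] now holds one entry per grantee.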

View File

@@ -483,6 +483,14 @@ def are_dictionaries_identical(source_dict, target_dict, ignore_whitespaces,
             target_value = target_value.translate(
                 str.maketrans('', '', string.whitespace))

+        # We need a proper solution as sometimes we observe that
+        # source_value is '' and target_value is None or vice versa
+        # in such situation we shown the comparison as different
+        # which is wrong.
+        if (source_value == '' and target_value is None) or \
+                (source_value is None and target_value == ''):
+            continue
+
         if source_value != target_value:
             return False
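The added guard treats an empty string on one side and None on the other as equal, so the comparison no longer reports a spurious difference for such pairs. A small standalone illustration of the rule; the helper name below is hypothetical:

def values_equivalent(source_value, target_value):
    # '' versus None is considered "no difference", mirroring the guard above.
    if (source_value == '' and target_value is None) or \
            (source_value is None and target_value == ''):
        return True
    return source_value == target_value

assert values_equivalent('', None)
assert values_equivalent(None, '')
assert not values_equivalent('a', None)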
@@ -597,6 +605,12 @@ def directory_diff(source_dict, target_dict, ignore_keys=[], difference=None):
         else:
             difference[key] = source_dict[key]

+    if len(src_only) == 0 and len(tar_only) > 0:
+        for key in tar_only:
+            if isinstance(target_dict[key], list):
+                difference[key] = {}
+                difference[key]['deleted'] = target_dict[key]
+
     return difference
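With this addition, keys that exist only in the target dictionary and hold lists are reported under a 'deleted' entry instead of being silently dropped from the result. A hedged sketch of the resulting shape, using made-up keys:

source_dict = {'columns': [{'name': 'id'}]}
target_dict = {'columns': [{'name': 'id'}], 'constraints': [{'name': 'pk_id'}]}

src_only = [key for key in source_dict if key not in target_dict]  # []
tar_only = [key for key in target_dict if key not in source_dict]  # ['constraints']

difference = {}
if len(src_only) == 0 and len(tar_only) > 0:
    for key in tar_only:
        if isinstance(target_dict[key], list):
            difference[key] = {'deleted': target_dict[key]}

# difference == {'constraints': {'deleted': [{'name': 'pk_id'}]}}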

12 file diffs suppressed because they are too large.

View File

@@ -12,23 +12,20 @@ import json
 import os
 import random

-from pgadmin.utils import server_utils as server_utils
 from pgadmin.utils.route import BaseTestGenerator
 from regression import parent_node_dict
 from regression.python_test_utils import test_utils as utils
-from .utils import create_table, create_schema, restore_schema
+from .utils import restore_schema
-from pgadmin.browser.server_groups.servers.databases.tests import utils as \
-    database_utils
 from pgadmin.utils.versioned_template_loader import \
     get_version_mapping_directories


-class SchemaDiffTestCase():
+class SchemaDiffTestCase(BaseTestGenerator):
     """ This class will test the schema diff. """

     scenarios = [
         # Fetching default URL for database node.
         ('Schema diff comparison', dict(
-            url='schema_diff/compare/{0}/{1}/{2}/{3}/{4}/{5}/{6}'))
+            url='schema_diff/compare/{0}/{1}/{2}/{3}/{4}'))
     ]

     def setUp(self):
@@ -40,8 +37,12 @@ class SchemaDiffTestCase():
         self.server = parent_node_dict["server"][-1]["server"]
         self.server_id = parent_node_dict["server"][-1]["server_id"]

-        self.nodes = ['table', 'function', 'procedure', 'view', 'mview']
+        self.schema_name = 'test_schema_diff'

-        self.restore_backup()
+        self.restored_backup = True
+        status = self.restore_backup()
+        if not status:
+            self.restored_backup = False

     def restore_backup(self):
         self.sql_folder = self.get_sql_folder()
@@ -60,10 +61,19 @@ class SchemaDiffTestCase():
             raise FileNotFoundError(
                 '{} file does not exists'.format(tar_sql_path))

-        self.src_schema_id = restore_schema(self.server, self.src_database,
-                                            'source', src_sql_path)
-        self.tar_schema_id = restore_schema(self.server, self.tar_database,
-                                            'target', tar_sql_path)
+        status, self.src_schema_id = restore_schema(
+            self.server, self.src_database, self.schema_name, src_sql_path)
+        if not status:
+            print("Failed to restore schema on source database.")
+            return False
+
+        status, self.tar_schema_id = restore_schema(
+            self.server, self.tar_database, self.schema_name, tar_sql_path)
+        if not status:
+            print("Failed to restore schema on target database.")
+            return False
+
+        return True

     def get_sql_folder(self):
         """
@@ -101,10 +111,8 @@ class SchemaDiffTestCase():
     def compare(self):
         comp_url = self.url.format(self.trans_id, self.server_id,
                                    self.src_db_id,
-                                   self.src_schema_id,
                                    self.server_id,
-                                   self.tar_db_id,
-                                   self.tar_schema_id
+                                   self.tar_db_id
                                    )

         response = self.tester.get(comp_url)
@@ -114,7 +122,7 @@ class SchemaDiffTestCase():
     def runTest(self):
         """ This function will test the schema diff."""
-
+        self.assertEqual(True, self.restored_backup)
         response = self.tester.get("schema_diff/initialize")
         self.assertEqual(response.status_code, 200)
         response_data = json.loads(response.data.decode('utf-8'))
@@ -142,18 +150,21 @@ class SchemaDiffTestCase():
         file_obj = open(diff_file, 'a')

         for diff in response_data['data']:
-            if diff['type'] in self.nodes and diff['status'] == 'Identical':
+            if diff['status'] == 'Identical':
                 src_obj_oid = diff['source_oid']
                 tar_obj_oid = diff['target_oid']
+                src_schema_id = diff['source_scid']
+                tar_schema_id = diff['target_scid']

                 if src_obj_oid is not None and tar_obj_oid is not None:
                     url = 'schema_diff/ddl_compare/{0}/{1}/{2}/{3}/{4}/{5}/' \
                           '{6}/{7}/{8}/{9}/{10}/'.format(self.trans_id,
                                                          self.server_id,
                                                          self.src_db_id,
-                                                         self.src_schema_id,
+                                                         src_schema_id,
                                                          self.server_id,
                                                          self.tar_db_id,
-                                                         self.tar_schema_id,
+                                                         tar_schema_id,
                                                          src_obj_oid,
                                                          tar_obj_oid,
                                                          diff['type'],
@@ -170,17 +181,17 @@ class SchemaDiffTestCase():
         file_obj.close()

         try:
-            restore_schema(self.server, self.tar_database, 'target',
-                           diff_file)
+            restore_schema(self.server, self.tar_database, self.schema_name,
+                           diff_file)
             os.remove(diff_file)

             response_data = self.compare()

             for diff in response_data['data']:
-                if diff['type'] in self.nodes:
-                    self.assertEqual(diff['status'], 'Identical')
+                self.assertEqual(diff['status'], 'Identical')
         except Exception as e:
-            os.remove(diff_file)
+            if os.path.exists(diff_file):
+                os.remove(diff_file)

     def tearDown(self):
         """This function drop the added database"""

View File

@@ -16,38 +16,50 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \


 def restore_schema(server, db_name, schema_name, sql_path):
-    connection = utils.get_db_connection(db_name,
-                                         server['username'],
-                                         server['db_password'],
-                                         server['host'],
-                                         server['port'],
-                                         server['sslmode']
-                                         )
-
-    old_isolation_level = connection.isolation_level
-    connection.set_isolation_level(0)
-    pg_cursor = connection.cursor()
-    sql = ''
-    with open(sql_path, 'r') as content_file:
-        sql = content_file.read()
-    pg_cursor.execute(sql)
-    connection.set_isolation_level(old_isolation_level)
-    connection.commit()
-
-    SQL = """SELECT
-        nsp.oid
-    FROM
-        pg_namespace nsp
-    WHERE nsp.nspname = '{0}'""".format(schema_name)
-
-    pg_cursor.execute(SQL)
-    schema = pg_cursor.fetchone()
-    schema_id = None
-    if schema:
-        schema_id = schema[0]
-    connection.close()
-    return schema_id
+    """
+    This function is used to restore the schema.
+    :param server:
+    :param db_name:
+    :param schema_name:
+    :param sql_path:
+    :return:
+    """
+    schema_id = None
+    try:
+        connection = utils.get_db_connection(db_name,
+                                             server['username'],
+                                             server['db_password'],
+                                             server['host'],
+                                             server['port'],
+                                             server['sslmode']
+                                             )
+        old_isolation_level = connection.isolation_level
+        connection.set_isolation_level(0)
+        pg_cursor = connection.cursor()
+        with open(sql_path, 'r') as content_file:
+            sql = content_file.read()
+        pg_cursor.execute(sql)
+        connection.set_isolation_level(old_isolation_level)
+        connection.commit()
+
+        SQL = """SELECT
+            nsp.oid
+        FROM
+            pg_namespace nsp
+        WHERE nsp.nspname = '{0}'""".format(schema_name)
+
+        pg_cursor.execute(SQL)
+        schema = pg_cursor.fetchone()
+        if schema:
+            schema_id = schema[0]
+        connection.close()
+    except Exception as e:
+        print(str(e))
+        return False, schema_id
+
+    return True, schema_id


 def create_schema(server, db_name, schema_name):
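Because restore_schema() now returns a (status, schema_id) tuple and swallows its own exceptions, callers check the status flag instead of wrapping the call in try/except, as the test case above does. A minimal usage sketch under those assumptions; the paths and the server dict are placeholders:

status, schema_id = restore_schema(server, 'source_db', 'test_schema_diff',
                                   '/tmp/source.sql')
if not status:
    print("Failed to restore schema on source database.")
else:
    print("Schema restored, oid = {0}".format(schema_id))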