Mirror of https://github.com/pgadmin-org/pgadmin4.git, synced 2025-01-23 15:03:26 -06:00
Skip tests where appropriate on GPDB. Fixes #3190
Victoria & Joao @ Pivotal.

This commit is contained in: parent 6b03cb78af, commit 876ce1799a
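The change is mostly mechanical: each affected test case gains a class-level skip_on_database = ['gpdb'] attribute and calls super().setUp() / super().runTest(). The base-class code that actually consumes this attribute (pgAdmin's BaseTestGenerator in pgadmin.utils.route) is not shown in this excerpt, so the following is only an illustrative sketch of how such a marker could be honoured; the class names and skip logic here are assumptions, not part of the diff.

# Illustrative sketch only, not part of this commit.
import unittest


class SkippableTestBase(unittest.TestCase):
    # Subclasses list backend types they cannot run against, e.g. ['gpdb'].
    skip_on_database = []

    # Assumed to be filled in by the test harness with the server under test.
    server_information = {'type': 'pg'}

    def setUp(self):
        # Skip before any fixtures are created when the backend is excluded.
        if self.server_information.get('type') in self.skip_on_database:
            self.skipTest('Not supported on %s' %
                          self.server_information['type'])


class ExampleGpdbSkippedTest(SkippableTestBase):
    skip_on_database = ['gpdb']  # mirrors the attribute added in this diff

    def runTest(self):
        self.assertTrue(True)

Calling super(...).setUp() or super(...).runTest() from each test case, as the hunks below do, is what gives the base class a chance to perform a check like this.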
@@ -20,6 +20,7 @@ from . import utils as cast_utils

class CastsAddTestCase(BaseTestGenerator):
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for cast node.
        ('Check Cast Node', dict(url='/browser/cast/obj/'))

@@ -27,6 +28,7 @@ class CastsAddTestCase(BaseTestGenerator):

    def runTest(self):
        """ This function will add cast under test database. """
        super(CastsAddTestCase, self).runTest()
        self.server_data = parent_node_dict["database"][-1]
        self.server_id = self.server_data["server_id"]
        self.db_id = self.server_data['db_id']

@@ -19,12 +19,14 @@ from . import utils as cast_utils

class CastsDeleteTestCase(BaseTestGenerator):
    """ This class will delete the cast node added under database node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for cast node.
        ('Check Cast Node', dict(url='/browser/cast/obj/'))
    ]

    def setUp(self):
        super(CastsDeleteTestCase, self).setUp()
        self.default_db = self.server["db"]
        self.database_info = parent_node_dict['database'][-1]
        self.db_name = self.database_info['db_name']

@@ -19,6 +19,7 @@ from . import utils as cast_utils

class CastsGetTestCase(BaseTestGenerator):
    """ This class will fetch the cast node added under database node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for cast node.
        ('Check Cast Node', dict(url='/browser/cast/obj/'))

@@ -26,6 +27,7 @@ class CastsGetTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create cast."""
        super(CastsGetTestCase, self).setUp()
        self.default_db = self.server["db"]
        self.database_info = parent_node_dict['database'][-1]
        self.db_name = self.database_info['db_name']

@@ -21,6 +21,7 @@ from . import utils as cast_utils

class CastsPutTestCase(BaseTestGenerator):
    """ This class will fetch the cast node added under database node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for cast node.
        ('Check Cast Node', dict(url='/browser/cast/obj/'))

@@ -28,6 +29,7 @@ class CastsPutTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create cast."""
        super(CastsPutTestCase, self).setUp()
        self.default_db = self.server["db"]
        self.database_info = parent_node_dict['database'][-1]
        self.db_name = self.database_info['db_name']
@@ -14,7 +14,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
    utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression import trigger_funcs_utils

@@ -13,7 +13,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
    utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression import trigger_funcs_utils

@@ -13,7 +13,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
    utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression import trigger_funcs_utils

@@ -14,7 +14,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
    utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression import trigger_funcs_utils
@@ -20,6 +20,7 @@ from . import utils as extension_utils

class ExtensionsAddTestCase(BaseTestGenerator):
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for extension node.
        ('Check Extension Node', dict(url='/browser/extension/obj/'))

@@ -27,6 +28,7 @@ class ExtensionsAddTestCase(BaseTestGenerator):

    def runTest(self):
        """ This function will add extension under test schema. """
        super(ExtensionsAddTestCase, self).runTest()
        self.schema_data = parent_node_dict["schema"][-1]
        self.server_id = self.schema_data["server_id"]
        self.db_id = self.schema_data['db_id']

@@ -18,6 +18,7 @@ from . import utils as extension_utils

class ExtensionsDeleteTestCase(BaseTestGenerator):
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for extension node.
        ('Check Extension Node', dict(url='/browser/extension/obj/'))

@@ -25,6 +26,7 @@ class ExtensionsDeleteTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension."""
        super(ExtensionsDeleteTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -18,6 +18,7 @@ from . import utils as extension_utils

class ExtensionsGetTestCase(BaseTestGenerator):
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for extension node.
        ('Check Extension Node', dict(url='/browser/extension/obj/'))

@@ -25,6 +26,7 @@ class ExtensionsGetTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension."""
        super(ExtensionsGetTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -20,6 +20,7 @@ from . import utils as extension_utils

class ExtensionsPutTestCase(BaseTestGenerator):
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for extension node.
        ('Check Extension Node', dict(url='/browser/extension/obj/'))

@@ -27,6 +28,7 @@ class ExtensionsPutTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension."""
        super(ExtensionsPutTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']
@@ -27,6 +27,7 @@ class ForeignServerAddTestCase(BaseTestGenerator):
    """
    This class will add foreign server under database node.
    """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for foreign server node.
        ('Check FSRV Node', dict(url='/browser/foreign_server/obj/'))

@@ -34,6 +35,7 @@ class ForeignServerAddTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(ForeignServerAddTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -25,6 +25,7 @@ from . import utils as fsrv_utils

class ForeignServerDeleteTestCase(BaseTestGenerator):
    """This class will add foreign server under FDW node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for foreign server node.
        ('Check FSRV Node', dict(url='/browser/foreign_server/obj/'))

@@ -32,6 +33,7 @@ class ForeignServerDeleteTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(ForeignServerDeleteTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -25,6 +25,7 @@ from . import utils as fsrv_utils

class ForeignServerGetTestCase(BaseTestGenerator):
    """This class will add foreign server under FDW node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for foreign server node.
        ('Check FSRV Node', dict(url='/browser/foreign_server/obj/'))

@@ -32,6 +33,7 @@ class ForeignServerGetTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(ForeignServerGetTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -26,6 +26,7 @@ from . import utils as fsrv_utils

class ForeignServerPutTestCase(BaseTestGenerator):
    """This class will add foreign server under FDW node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for foreign server node.
        ('Check FSRV Node', dict(url='/browser/foreign_server/obj/'))

@@ -33,6 +34,7 @@ class ForeignServerPutTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(ForeignServerPutTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']
@@ -27,6 +27,7 @@ from regression.python_test_utils import test_utils as utils

class UserMappingAddTestCase(BaseTestGenerator):
    """This class will add user mapping under foreign server node."""
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for user mapping node.

@@ -35,6 +36,7 @@ class UserMappingAddTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(UserMappingAddTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -27,6 +27,7 @@ from . import utils as um_utils

class UserMappingDeleteTestCase(BaseTestGenerator):
    """This class will delete user mapping under foreign server node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for user mapping node.
        ('Check user mapping Node', dict(url='/browser/user_mapping/obj/'))

@@ -34,6 +35,7 @@ class UserMappingDeleteTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(UserMappingDeleteTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -27,6 +27,7 @@ from . import utils as um_utils

class UserMappingGetTestCase(BaseTestGenerator):
    """This class will add user mapping under foreign server node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for user mapping node.
        ('Check user mapping Node', dict(url='/browser/user_mapping/obj/'))

@@ -34,6 +35,7 @@ class UserMappingGetTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(UserMappingGetTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -28,6 +28,7 @@ from . import utils as um_utils

class UserMappingPutTestCase(BaseTestGenerator):
    """This class will update user mapping under foreign server node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for user mapping node.
        ('Check user mapping Node', dict(url='/browser/user_mapping/obj/'))

@@ -35,6 +36,7 @@ class UserMappingPutTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(UserMappingPutTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']
@@ -21,6 +21,8 @@ from . import utils as fdw_utils

class FDWDAddTestCase(BaseTestGenerator):
    """ This class will add foreign data wrappers under database node. """
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for foreign_data_wrapper node.
        ('Check FDW Node',

@@ -29,6 +31,8 @@ class FDWDAddTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension."""
        super(FDWDAddTestCase, self).setUp()

        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -21,12 +21,14 @@ from . import utils as fdw_utils

class FDWDDeleteTestCase(BaseTestGenerator):
    """This class will delete foreign data wrappers under test database."""
    skip_on_database = ['gpdb']
    scenarios = [  # Fetching default URL for foreign_data_wrapper node.
        ('Check FDW Node',
         dict(url='/browser/foreign_data_wrapper/obj/'))]

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(FDWDDeleteTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -21,6 +21,7 @@ from . import utils as fdw_utils

class FDWDGetTestCase(BaseTestGenerator):
    """ This class will add foreign data wrappers under test database. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for foreign_data_wrapper node.
        ('Check FDW Node',

@@ -29,6 +30,7 @@ class FDWDGetTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(FDWDGetTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']

@@ -22,6 +22,8 @@ from . import utils as fdw_utils

class FDWDPutTestCase(BaseTestGenerator):
    """This class will update foreign data wrappers under test database."""
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for foreign_data_wrapper node.
        ('Check FDW Node',

@@ -30,6 +32,7 @@ class FDWDPutTestCase(BaseTestGenerator):

    def setUp(self):
        """ This function will create extension and foreign data wrapper."""
        super(FDWDPutTestCase, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']
@@ -21,11 +21,13 @@ from . import utils as language_utils

class LanguagesAddTestCase(BaseTestGenerator):
    skip_on_database = ['gpdb']
    scenarios = [
        ('Language add test case', dict(url='/browser/language/obj/'))
    ]

    def setUp(self):
        super(LanguagesAddTestCase, self).setUp()
        self.server_data = parent_node_dict["database"][-1]
        self.server_id = self.server_data["server_id"]
        self.db_id = self.server_data['db_id']
@@ -21,12 +21,14 @@ from regression.python_test_utils import test_utils as utils

class CollationAddTestCase(BaseTestGenerator):
    """ This class will add new collation under schema node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for collation node.
        ('Default Node URL', dict(url='/browser/collation/obj/'))
    ]

    def setUp(self):
        super(CollationAddTestCase, self).setUp()
        self.database_info = parent_node_dict["database"][-1]
        self.db_name = self.database_info["db_name"]
        # Change the db name, so that schema will create in newly created db

@@ -21,12 +21,14 @@ from . import utils as collation_utils

class CollationDeleteTestCase(BaseTestGenerator):
    """ This class will delete added collation under schema node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for collation node.
        ('Fetch collation Node URL', dict(url='/browser/collation/obj/'))
    ]

    def setUp(self):
        super(CollationDeleteTestCase, self).setUp()
        self.schema_info = parent_node_dict["schema"][-1]
        self.schema_name = self.schema_info["schema_name"]
        self.db_name = parent_node_dict["database"][-1]["db_name"]

@@ -21,12 +21,14 @@ from . import utils as collation_utils

class CollationGetTestCase(BaseTestGenerator):
    """ This class will fetch new collation under schema node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for collation node.
        ('Fetch collation Node URL', dict(url='/browser/collation/obj/'))
    ]

    def setUp(self):
        super(CollationGetTestCase, self).setUp()
        self.schema_info = parent_node_dict["schema"][-1]
        self.schema_name = self.schema_info["schema_name"]
        self.db_name = parent_node_dict["database"][-1]["db_name"]

@@ -22,12 +22,14 @@ from . import utils as collation_utils

class CollationPutTestCase(BaseTestGenerator):
    """ This class will update added collation under schema node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for collation node.
        ('Fetch collation Node URL', dict(url='/browser/collation/obj/'))
    ]

    def setUp(self):
        super(CollationPutTestCase, self).setUp()
        self.schema_info = parent_node_dict["schema"][-1]
        self.schema_name = self.schema_info["schema_name"]
        self.db_name = parent_node_dict["database"][-1]["db_name"]
@@ -0,0 +1,25 @@
SELECT
    d.oid, d.typname as name, d.typbasetype, format_type(b.oid,NULL) as basetype,
    pg_get_userbyid(d.typowner) as owner,
    NULL AS colloid, format_type(b.oid, d.typtypmod) AS fulltype,
    '' AS collname,
    d.typtypmod, d.typnotnull, d.typdefault, d.typndims, d.typdelim, bn.nspname as basensp,
    description, (SELECT COUNT(1) FROM pg_type t2 WHERE t2.typname=d.typname) > 1 AS domisdup,
    (SELECT COUNT(1) FROM pg_type t3 WHERE t3.typname=b.typname) > 1 AS baseisdup,
    ARRAY [] :: TEXT [] AS seclabels

FROM
    pg_type d
JOIN
    pg_type b ON b.oid = d.typbasetype
JOIN
    pg_namespace bn ON bn.oid=d.typnamespace
LEFT OUTER JOIN
    pg_description des ON (des.objoid=d.oid AND des.classoid='pg_type'::regclass)
WHERE
    d.typnamespace = {{scid}}::oid
{% if doid %}
    AND d.oid={{doid}}::oid
{% endif %}
ORDER BY
    d.typname;
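The file above is a Jinja2 SQL template: scid is always substituted, and the extra predicate on d.oid is emitted only when doid is passed in. pgAdmin renders these through its own render_template helper, which is not shown here; the snippet below is a minimal sketch of the same conditional substitution using jinja2 directly, with made-up OID values.

# Minimal sketch of the template's {% if doid %} behaviour (values are made up).
from jinja2 import Template

sql = Template(
    "SELECT d.oid FROM pg_type d WHERE d.typnamespace = {{scid}}::oid"
    "{% if doid %} AND d.oid={{doid}}::oid{% endif %}"
)
print(sql.render(scid=2200))              # no doid: the AND clause is omitted
print(sql.render(scid=2200, doid=16385))  # with doid: filters to one domain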
@@ -51,7 +51,6 @@ class DomainAddTestCase(BaseTestGenerator):
        data = {
            "basensp": schema_name,
            "basetype": "character",
            "collname": "pg_catalog.\"POSIX\"",
            "constraints": [{
                "conname": "num",
                "convalidated": True
@@ -38,7 +38,7 @@ def create_domain(server, db_name, schema_name, schema_id, domain_name):
                                      server['port'])
        pg_cursor = connection.cursor()
        query = 'CREATE DOMAIN ' + schema_name + '.' + domain_name + \
                ' AS character(10) COLLATE pg_catalog."POSIX" DEFAULT 1'
                ' AS character(10) DEFAULT 1'
        pg_cursor.execute(query)
        connection.commit()
        # Get 'oid' from newly created domain
@@ -28,6 +28,7 @@ class ForeignTableAddTestCase(BaseTestGenerator):
    """
    This class will add foreign table under database node.
    """
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for foreign server node.

@@ -37,6 +38,7 @@ class ForeignTableAddTestCase(BaseTestGenerator):
    def setUp(self):
        """ This function will create foreign data wrapper and
        foreign server. """
        super(ForeignTableAddTestCase, self).setUp()

        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']

@@ -27,6 +27,7 @@ class ForeignTableDeleteTestCase(BaseTestGenerator):
    """
    This class will delete foreign table under database node.
    """
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for foreign table node.

@@ -36,6 +37,7 @@ class ForeignTableDeleteTestCase(BaseTestGenerator):
    def setUp(self):
        """ This function will create foreign data wrapper, foreign server
        and foreign table. """
        super(ForeignTableDeleteTestCase, self).setUp()

        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']

@@ -27,6 +27,8 @@ class ForeignTableGetTestCase(BaseTestGenerator):
    """
    This class will fetch foreign table under database node.
    """
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for foreign server node.
        ('Check foreign table Node', dict(url='/browser/foreign_table/obj/'))

@@ -35,6 +37,7 @@ class ForeignTableGetTestCase(BaseTestGenerator):
    def setUp(self):
        """ This function will create foreign data wrapper, foreign server
        and foreign table. """
        super(ForeignTableGetTestCase, self).setUp()

        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']

@@ -28,6 +28,8 @@ class ForeignTablePutTestCase(BaseTestGenerator):
    """
    This class will fetch foreign table under database node.
    """
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for foreign server node.
        ('Check foreign table Node', dict(url='/browser/foreign_table/obj/'))

@@ -36,6 +38,7 @@ class ForeignTablePutTestCase(BaseTestGenerator):
    def setUp(self):
        """ This function will create foreign data wrapper, foreign server
        and foreign table. """
        super(ForeignTablePutTestCase, self).setUp()

        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
@@ -14,7 +14,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -22,6 +22,7 @@ from regression.python_test_utils import test_utils as utils

class TriggerFuncAddTestCase(BaseTestGenerator):
    """ This class will add new trigger function under schema node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for trigger function node.
        ('Fetch Trigger Function Node URL', dict(

@@ -30,6 +31,7 @@ class TriggerFuncAddTestCase(BaseTestGenerator):

    def runTest(self):
        """ This function will add trigger function under schema node. """
        super(TriggerFuncAddTestCase, self).runTest()
        db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        server_id = schema_info["server_id"]

@@ -13,7 +13,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -22,6 +22,7 @@ from . import utils as trigger_funcs_utils

class TriggerFuncDeleteTestCase(BaseTestGenerator):
    """ This class will delete the trigger function under schema node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for trigger function node.
        ('Fetch Trigger Function Node URL',

@@ -29,6 +30,7 @@ class TriggerFuncDeleteTestCase(BaseTestGenerator):
    ]

    def setUp(self):
        super(TriggerFuncDeleteTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        self.schema_name = parent_node_dict["schema"][-1]["schema_name"]
        self.schema_id = parent_node_dict["schema"][-1]["schema_id"]

@@ -13,7 +13,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -22,6 +22,7 @@ from . import utils as trigger_funcs_utils

class TriggerFuncGetTestCase(BaseTestGenerator):
    """This class will fetch added trigger function under schema node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for trigger function node.
        ('Fetch Trigger Function Node URL',

@@ -29,6 +30,7 @@ class TriggerFuncGetTestCase(BaseTestGenerator):
    ]

    def setUp(self):
        super(TriggerFuncGetTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        self.schema_name = parent_node_dict["schema"][-1]["schema_name"]
        self.schema_id = parent_node_dict["schema"][-1]["schema_id"]

@@ -14,7 +14,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -23,6 +23,7 @@ from . import utils as trigger_funcs_utils

class TriggerFuncPutTestCase(BaseTestGenerator):
    """ This class will update new trigger function under schema node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for trigger function node.
        ('Fetch Trigger Function Node URL',

@@ -30,6 +31,7 @@ class TriggerFuncPutTestCase(BaseTestGenerator):
    ]

    def setUp(self):
        super(TriggerFuncPutTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        self.schema_name = parent_node_dict["schema"][-1]["schema_name"]
        self.schema_id = parent_node_dict["schema"][-1]["schema_id"]
@@ -14,7 +14,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -22,6 +22,7 @@ from regression.python_test_utils import test_utils as utils

class PackageAddTestCase(BaseTestGenerator):
    """ This class will add new package under test schema. """
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for package node.

@@ -30,7 +31,7 @@ class PackageAddTestCase(BaseTestGenerator):
    ]

    def setUp(self):

        super(PackageAddTestCase, self).setUp()
        schema_info = parent_node_dict["schema"][-1]
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]

@@ -13,7 +13,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -22,6 +22,7 @@ from . import utils as package_utils

class PackageDeleteTestCase(BaseTestGenerator):
    """ This class will delete new package under test schema. """
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for package node.

@@ -30,7 +31,7 @@ class PackageDeleteTestCase(BaseTestGenerator):
    ]

    def setUp(self):

        super(PackageDeleteTestCase, self).setUp()
        schema_info = parent_node_dict["schema"][-1]
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]

@@ -13,7 +13,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -22,6 +22,7 @@ from . import utils as package_utils

class PackageGetTestCase(BaseTestGenerator):
    """ This class will fetch new package under test schema. """
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for package node.

@@ -30,7 +31,7 @@ class PackageGetTestCase(BaseTestGenerator):
    ]

    def setUp(self):

        super(PackageGetTestCase, self).setUp()
        schema_info = parent_node_dict["schema"][-1]
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]

@@ -14,7 +14,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -23,7 +23,7 @@ from . import utils as package_utils

class PackagePutTestCase(BaseTestGenerator):
    """ This class will update new package under test schema. """

    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for package node.
        ('Fetch Package Node URL', dict(

@@ -31,7 +31,7 @@ class PackagePutTestCase(BaseTestGenerator):
    ]

    def setUp(self):

        super(PackagePutTestCase, self).setUp()
        schema_info = parent_node_dict["schema"][-1]
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
@@ -21,6 +21,7 @@ from regression.python_test_utils import test_utils as utils

class SequenceAddTestCase(BaseTestGenerator):
    """ This class will add new sequence(s) under schema node. """
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for sequence node.
        (

@@ -60,7 +61,7 @@ class SequenceAddTestCase(BaseTestGenerator):
    ]

    def setUp(self):
        pass
        super(SequenceAddTestCase, self).setUp()

    def runTest(self):
        """This function will add sequence(s) under schema node."""

@@ -21,12 +21,14 @@ from . import utils as sequence_utils

class SequenceDeleteTestCase(BaseTestGenerator):
    """This class will delete added sequence under schema node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for sequence node.
        ('Fetch sequence Node URL', dict(url='/browser/sequence/obj/'))
    ]

    def setUp(self):
        super(SequenceDeleteTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]

@@ -21,12 +21,14 @@ from . import utils as sequence_utils

class SequenceGetTestCase(BaseTestGenerator):
    """This class will fetch added sequence under schema node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for sequence node.
        ('Fetch sequence Node URL', dict(url='/browser/sequence/obj/'))
    ]

    def setUp(self):
        super(SequenceGetTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]

@@ -22,12 +22,14 @@ from . import utils as sequence_utils

class SequencePutTestCase(BaseTestGenerator):
    """This class will update added sequence under schema node."""
    skip_on_database = ['gpdb']
    scenarios = [
        # Fetching default URL for sequence node.
        ('Fetch sequence Node URL', dict(url='/browser/sequence/obj/'))
    ]

    def setUp(self):
        super(SequencePutTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@@ -16,7 +16,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -24,6 +24,7 @@ from regression.python_test_utils import test_utils as utils

class SynonymAddTestCase(BaseTestGenerator):
    """This class will add new synonym under test schema."""
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for synonym node.

@@ -31,6 +32,7 @@ class SynonymAddTestCase(BaseTestGenerator):
    ]

    def setUp(self):
        super(SynonymAddTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]

@@ -15,7 +15,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -24,6 +24,7 @@ from . import utils as synonym_utils

class SynonymDeleteTestCase(BaseTestGenerator):
    """This class will delete added synonym under schema node."""
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for synonym node.

@@ -31,6 +32,7 @@ class SynonymDeleteTestCase(BaseTestGenerator):
    ]

    def setUp(self):
        super(SynonymDeleteTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]

@@ -15,7 +15,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -24,6 +24,7 @@ from . import utils as synonym_utils

class SynonymGetTestCase(BaseTestGenerator):
    """This class will fetch new synonym under schema node."""
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for synonym node.

@@ -31,6 +32,7 @@ class SynonymGetTestCase(BaseTestGenerator):
    ]

    def setUp(self):
        super(SynonymGetTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]

@@ -18,7 +18,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -27,6 +27,7 @@ from . import utils as synonym_utils

class SynonymPutTestCase(BaseTestGenerator):
    """This class will update added synonym under test schema."""
    skip_on_database = ['gpdb']

    scenarios = [
        # Fetching default URL for synonym node.

@@ -34,7 +35,7 @@ class SynonymPutTestCase(BaseTestGenerator):
    ]

    def setUp(self):

        super(SynonymPutTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@@ -364,9 +364,20 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
                data['attlen'] = None
                data['attprecision'] = None

            self.set_length_precision(
                length, precision, fulltype, data
            )
            import re

            # If we have length & precision both
            if length and precision:
                matchObj = re.search(r'(\d+),(\d+)', fulltype)
                if matchObj:
                    data['attlen'] = matchObj.group(1)
                    data['attprecision'] = matchObj.group(2)
            elif length:
                # If we have length only
                matchObj = re.search(r'(\d+)', fulltype)
                if matchObj:
                    data['attlen'] = matchObj.group(1)
                    data['attprecision'] = None

        # We need to fetch inherited tables for each table
        SQL = render_template("/".join([self.template_path,
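The hunk above swaps a call to set_length_precision for an inline regex parse of the formatted type string. The following is a standalone sketch of that parsing idea only; the helper name is illustrative and, unlike the view code, it infers which branch to take from the string itself rather than from separately computed length/precision flags.

import re


def parse_length_precision(fulltype):
    # Pull length and precision out of a formatted type string such as
    # 'numeric(10,5)' or 'character varying(30)', mirroring the hunk above.
    attlen = attprecision = None
    match = re.search(r'(\d+),(\d+)', fulltype)
    if match:
        # Length and precision both present, e.g. numeric(10,5)
        attlen, attprecision = match.group(1), match.group(2)
    else:
        match = re.search(r'(\d+)', fulltype)
        if match:
            # Length only, e.g. character varying(30)
            attlen = match.group(1)
    return attlen, attprecision


print(parse_length_precision('numeric(10,5)'))          # ('10', '5')
print(parse_length_precision('character varying(30)'))  # ('30', None)
print(parse_length_precision('text'))                   # (None, None)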
@@ -23,12 +23,14 @@ from regression.python_test_utils import test_utils as utils

class CheckConstraintAddTestCase(BaseTestGenerator):
    """This class will add check constraint to existing table"""
    skip_on_database = ['gpdb']
    scenarios = [
        ('Add check constraint to table',
         dict(url='/browser/check_constraint/obj/'))
    ]

    def setUp(self):
        super(CheckConstraintAddTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]

@@ -23,12 +23,14 @@ from . import utils as chk_constraint_utils

class CheckConstraintDeleteTestCase(BaseTestGenerator):
    """This class will delete check constraint to existing table"""
    skip_on_database = ['gpdb']
    scenarios = [
        ('Delete check constraint to table',
         dict(url='/browser/check_constraint/obj/'))
    ]

    def setUp(self):
        super(CheckConstraintDeleteTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]

@@ -23,12 +23,15 @@ from . import utils as chk_constraint_utils

class CheckConstraintGetTestCase(BaseTestGenerator):
    """This class will fetch check constraint to existing table"""
    skip_on_database = ['gpdb']

    scenarios = [
        ('Fetch check constraint to table',
         dict(url='/browser/check_constraint/obj/'))
    ]

    def setUp(self):
        super(CheckConstraintGetTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]

@@ -24,12 +24,14 @@ from . import utils as chk_constraint_utils

class CheckConstraintPutTestCase(BaseTestGenerator):
    """This class will update check constraint to existing table"""
    skip_on_database = ['gpdb']
    scenarios = [
        ('Update check constraint to table',
         dict(url='/browser/check_constraint/obj/'))
    ]

    def setUp(self):
        super(CheckConstraintPutTestCase, self).setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@@ -24,6 +24,7 @@ from regression.python_test_utils import test_utils as utils
class IndexConstraintAddTestCase(BaseTestGenerator):
    """This class will add index constraint(primary key or unique key) to
    table column"""
    skip_on_database = ['gpdb']
    primary_key_name = "test_primarykey_add_%s" % \
                       (str(uuid.uuid4())[1:8])
    primary_key_data = {"name": primary_key_name,

@@ -24,6 +24,7 @@ from . import utils as index_constraint_utils
class IndexConstraintDeleteTestCase(BaseTestGenerator):
    """This class will delete index constraint(primary key or unique key) of
    table column"""
    skip_on_database = ['gpdb']
    primary_key_name = "test_primarykey_delete_%s" % \
                       (str(uuid.uuid4())[1:8])
    unique_key_name = "test_uniquekey_delete_%s" % \

@@ -24,6 +24,7 @@ from . import utils as index_constraint_utils
class IndexConstraintGetTestCase(BaseTestGenerator):
    """This class will fetch the index constraint(primary key or unique key) of
    table column"""
    skip_on_database = ['gpdb']
    primary_key_name = "test_primarykey_delete_%s" % \
                       (str(uuid.uuid4())[1:8])
    unique_key_name = "test_uniquekey_delete_%s" % \

@@ -25,6 +25,7 @@ from . import utils as index_constraint_utils
class IndexConstraintUpdateTestCase(BaseTestGenerator):
    """This class will update index constraint(primary key or unique key) of
    table column"""
    skip_on_database = ['gpdb']
    primary_key_name = "test_primarykey_put_%s" % \
                       (str(uuid.uuid4())[1:8])
    unique_key_name = "test_uniquekey_put_%s" % \
@@ -15,8 +15,9 @@ FROM pg_index i
JOIN pg_attribute a ON (a.attrelid = i.indexrelid AND attnum = {{loop.index}})
JOIN pg_type ty ON ty.oid=a.atttypid
LEFT OUTER JOIN pg_opclass o ON (o.oid = i.indclass[{{loop.index -1}}])
LEFT OUTER JOIN pg_constraint c ON (c.conindid = i.indexrelid) LEFT OUTER JOIN pg_operator op ON (op.oid = c.conexclop[{{loop.index}}])
LEFT OUTER JOIN pg_constraint c ON (c.conindid = i.indexrelid)
LEFT OUTER JOIN pg_operator op ON (op.oid = c.conexclop[{{loop.index}}])
LEFT OUTER JOIN pg_collation coll ON a.attcollation=coll.oid
LEFT OUTER JOIN pg_namespace nspc ON coll.collnamespace=nspc.oid
WHERE i.indexrelid = {{cid}}::oid
{% endfor %}
{% endfor %}
@@ -0,0 +1,20 @@
{% for n in range(colcnt|int) %}
{% if loop.index != 1 %}
UNION
{% endif %}
SELECT
    i.indoption[{{loop.index -1}}] AS options,
    pg_get_indexdef(i.indexrelid, {{loop.index}}, true) AS coldef,
    NULL as op.oprname,
    CASE WHEN (o.opcdefault = FALSE) THEN o.opcname ELSE null END AS opcname,
    (SELECT setting AS value
     FROM pg_settings
     WHERE name='lc_collate') AS collname,
    '' as collnspname,
    format_type(ty.oid,NULL) AS col_type
FROM pg_index i
JOIN pg_attribute a ON (a.attrelid = i.indexrelid AND attnum = {{loop.index}})
JOIN pg_type ty ON ty.oid=a.atttypid
LEFT OUTER JOIN pg_opclass o ON (o.oid = i.indclass[{{loop.index -1}}])
WHERE i.indexrelid = {{cid}}::oid
{% endfor %}
@@ -0,0 +1,25 @@
SELECT
    i.indexrelid,
    CASE i.indoption[i.attnum - 1]
        WHEN 0 THEN ARRAY['ASC', 'NULLS LAST']
        WHEN 1 THEN ARRAY['DESC', 'NULLS FIRST']
        WHEN 2 THEN ARRAY['ASC', 'NULLS FIRST']
        WHEN 3 THEN ARRAY['DESC', 'NULLS ']
        ELSE ARRAY['UNKNOWN OPTION' || i.indoption[i.attnum - 1], '']
    END::text[] AS options,
    i.attnum,
    pg_get_indexdef(i.indexrelid, i.attnum, true) as attdef,
    CASE WHEN (o.opcdefault = FALSE) THEN o.opcname ELSE null END AS opcname,
    NULL AS oprname,
    '' AS collnspname
FROM (
    SELECT
        indexrelid, i.indoption, i.indclass,
        unnest(ARRAY(SELECT generate_series(1, i.indnatts) AS n)) AS attnum
    FROM
        pg_index i
    WHERE i.indexrelid = {{idx}}::OID
) i
LEFT JOIN pg_opclass o ON (o.oid = i.indclass[i.attnum - 1])
LEFT JOIN pg_attribute a ON (a.attrelid = i.indexrelid AND a.attnum = i.attnum)
ORDER BY i.attnum;
@@ -14,7 +14,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils

@@ -106,7 +106,7 @@ class TableAddTestCase(BaseTestGenerator):
            "hastoasttable": True,
            "like_constraints": True,
            "like_default_value": True,
            "like_relation": "pg_catalog.pg_tables",
            "like_relation": "pg_catalog.pg_namespace",
            "name": self.table_name,
            "primary_key": [],
            "relacl": [
@@ -1,96 +0,0 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import json
import uuid

from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as tables_utils


class TableNotNullUpdateTestCase(BaseTestGenerator):
    """This class will add new collation under schema node."""
    scenarios = [
        ('Update Table with not null field', dict(url='/browser/table/obj/')),
    ]

    def setUp(self):
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(
            self, utils.SERVER_GROUP, self.server_id, self.db_id
        )
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        self.table_name = "test_table_column_put_%s" % (str(uuid.uuid4())[1:8])

        custom_sql = 'column_1 "char" NOT NULL, ' \
                     'column_2 character varying(10) NOT NULL'

        self.table_id = tables_utils.create_table(
            self.server,
            self.db_name,
            self.schema_name,
            self.table_name,
            custom_sql
        )

    def runTest(self):
        """This function will fetch added table under schema node."""
        table_response = tables_utils.verify_table(
            self.server, self.db_name, self.table_id
        )
        if not table_response:
            raise Exception("Could not find the table to update.")

        data = {
            "id": self.table_id,
            "columns": {
                "changed": [
                    {
                        "attnum": 1,
                        "attnotnull": False
                    },
                    {
                        "attnum": 2,
                        "attnotnull": False
                    }
                ]
            }
        }

        response = self.tester.put(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(self.db_id) + '/' +
            str(self.schema_id) + '/' + str(self.table_id),
            data=json.dumps(data), follow_redirects=True
        )

        self.assertEquals(response.status_code, 200)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -14,7 +14,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@@ -15,8 +15,7 @@ import traceback
from regression.python_test_utils import test_utils as utils


def create_table(server, db_name, schema_name, table_name,
                 custom_column_sql=None):
def create_table(server, db_name, schema_name, table_name):
    """
    This function creates a table under provided schema.
    :param server: server details

@@ -40,13 +39,9 @@ def create_table(server, db_name, schema_name, table_name,
        old_isolation_level = connection.isolation_level
        connection.set_isolation_level(0)
        pg_cursor = connection.cursor()
        if custom_column_sql:
            query = "CREATE TABLE %s.%s(%s)" % \
                    (schema_name, table_name, custom_column_sql)
        else:
            query = "CREATE TABLE %s.%s(id serial UNIQUE NOT NULL, " \
                    "name text, location text)" % \
                    (schema_name, table_name)
        query = "CREATE TABLE %s.%s(id serial UNIQUE NOT NULL, name text," \
                " location text)" %\
                (schema_name, table_name)
        pg_cursor.execute(query)
        connection.set_isolation_level(old_isolation_level)
        connection.commit()
@ -25,11 +25,13 @@ from regression.python_test_utils import test_utils as utils
|
||||
|
||||
class TriggersAddTestCase(BaseTestGenerator):
|
||||
"""This class will add new trigger under table node."""
|
||||
skip_on_database = ['gpdb']
|
||||
scenarios = [
|
||||
('Add trigger Node URL', dict(url='/browser/trigger/obj/'))
|
||||
]
|
||||
|
||||
def setUp(self):
|
||||
super(TriggersAddTestCase, self).setUp()
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
|
@ -25,11 +25,13 @@ from . import utils as triggers_utils
|
||||
|
||||
class TriggersDeleteTestCase(BaseTestGenerator):
|
||||
"""This class will delete trigger under table node."""
|
||||
skip_on_database = ['gpdb']
|
||||
scenarios = [
|
||||
('Delete trigger Node URL', dict(url='/browser/trigger/obj/'))
|
||||
]
|
||||
|
||||
def setUp(self):
|
||||
super(TriggersDeleteTestCase, self).setUp()
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
|
@ -25,11 +25,13 @@ from . import utils as triggers_utils
|
||||
|
||||
class TriggersGetTestCase(BaseTestGenerator):
|
||||
"""This class will fetch trigger under table node."""
|
||||
skip_on_database = ['gpdb']
|
||||
scenarios = [
|
||||
('Fetch trigger Node URL', dict(url='/browser/trigger/obj/'))
|
||||
]
|
||||
|
||||
def setUp(self):
|
||||
super(TriggersGetTestCase, self).setUp()
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
|
@ -26,11 +26,13 @@ from . import utils as triggers_utils

class TriggersUpdateTestCase(BaseTestGenerator):
"""This class will update trigger under table node."""
skip_on_database = ['gpdb']
scenarios = [
('Put trigger Node URL', dict(url='/browser/trigger/obj/'))
]

def setUp(self):
super(TriggersUpdateTestCase, self).setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
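The trigger test classes above declare skip_on_database = ['gpdb'] and defer to the parent setUp(). The base generator's actual check is not part of this hunk; a minimal sketch of how such a check might look, where the surrounding class and the server['type'] key are assumptions for illustration only:

import unittest


class BaseTestGeneratorSketch(unittest.TestCase):
    # e.g. ['gpdb'] in the trigger test cases above
    skip_on_database = []
    # Populated by the test framework in the real suite (assumed shape).
    server = {'type': 'pg'}

    def setUp(self):
        # Skip the scenario when the backing database type is excluded.
        if self.server.get('type') in self.skip_on_database:
            self.skipTest("Skipped on database type '%s'"
                          % self.server['type'])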
@ -1748,20 +1748,39 @@ class BaseTableView(PGChildNodeView):
old_data['atttypmod']
)

def get_type_attr(key, data):
"""Utility function"""
if key in data:
return data[key]
return None

# If the column data type has not changed then fetch
# old length and precision
if 'elemoid' in old_data and 'cltype' not in c:
length, precision, typeval = \
self.get_length_precision(old_data['elemoid'])
# Set proper values for old data
self.set_length_precision(
length, precision, fulltype, old_data
)

# Set proper values for in new data
self.set_length_precision(
length, precision, fulltype, c, old_data
)
# If we have length & precision both
if length and precision:
matchObj = re.search(r'(\d+),(\d+)', fulltype)
if matchObj:
c['attlen'] = get_type_attr(
'attlen', c
) or matchObj.group(1)
c['attprecision'] = get_type_attr(
'attprecision', c
) or matchObj.group(2)
elif length:
# If we have length only
matchObj = re.search(r'(\d+)', fulltype)
if matchObj:
c['attlen'] = get_type_attr(
'attlen', c
) or matchObj.group(1)
c['attprecision'] = None
else:
c['attlen'] = None
c['attprecision'] = None

if 'cltype' in c:
typename = c['cltype']
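The fallback above recovers length and precision from the full type name with a regular expression when the client did not send them. A standalone illustration of the same parsing, independent of pgadmin; the sample type names are invented:

# Standalone illustration of the length/precision fallback shown above;
# the fulltype values are sample inputs, not taken from the commit.
import re


def split_length_precision(fulltype, has_length, has_precision):
    attlen, attprecision = None, None
    if has_length and has_precision:
        m = re.search(r'(\d+),(\d+)', fulltype)   # e.g. numeric(10,2)
        if m:
            attlen, attprecision = m.group(1), m.group(2)
    elif has_length:
        m = re.search(r'(\d+)', fulltype)         # e.g. varchar(30)
        if m:
            attlen = m.group(1)
    return attlen, attprecision


print(split_length_precision('numeric(10,2)', True, True))           # ('10', '2')
print(split_length_precision('character varying(30)', True, False))  # ('30', None)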
@ -5,12 +5,12 @@ SELECT
array_agg(b.is_grantable) AS grantable
FROM
(SELECT
(d).grantee AS grantee, (d).grantor AS grantor,
(d).is_grantable AS is_grantable,
CASE (d).privilege_type
(a).grantee AS grantee, (a).grantor AS grantor,
(a).is_grantable AS is_grantable,
CASE (a).privilege_type
WHEN 'CREATE' THEN 'C'
WHEN 'USAGE' THEN 'U'
ELSE 'UNKNOWN - ' || (d).privilege_type
ELSE 'UNKNOWN - ' || (a).privilege_type
END AS privilege_type
FROM
(
@ -9,7 +9,7 @@

from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -20,12 +20,14 @@ from . import utils as schema_utils

class SchemaPutTestCase(BaseTestGenerator):
""" This class will update the schema under database node. """
skip_on_database = ['gpdb']
scenarios = [
# Fetching default URL for extension node.
('Check Schema Node URL', dict(url='/browser/schema/obj/'))
]

def setUp(self):
super(SchemaPutTestCase, self).setUp()
self.database_info = parent_node_dict["database"][-1]
self.db_name = self.database_info["db_name"]
# Change the db name, so that schema will create in newly created db
@ -464,17 +464,22 @@ class TypeView(PGChildNodeView, DataTypeReader):

# Below logic will allow us to split length, precision from
# type name for grid
data = {
'attlen': None,
'attprecision': None
}
import re
t_len = None
t_prec = None

self.set_length_precision(
is_tlength, is_precision, row['fulltype'], data
)

t_len = data['attlen']
t_prec = data['attprecision']
# If we have length & precision both
if is_tlength and is_precision:
matchObj = re.search(r'(\d+),(\d+)', row['fulltype'])
if matchObj:
t_len = matchObj.group(1)
t_prec = matchObj.group(2)
elif is_tlength:
# If we have length only
matchObj = re.search(r'(\d+)', row['fulltype'])
if matchObj:
t_len = matchObj.group(1)
t_prec = None

type_name = DataTypeReader.parse_type_name(row['typname'])
@ -0,0 +1,7 @@
SELECT
'typacl' AS deftype,
'PUBLIC' AS grantee,
NULL AS grantor,
NULL AS privileges,
NULL AS grantable
LIMIT 0;
@ -0,0 +1,34 @@
{# The SQL given below will fetch composite type#}
{% if type == 'c' %}
SELECT attnum, attname, format_type(t.oid,NULL) AS typname, attndims, atttypmod, nsp.nspname,
(SELECT COUNT(1) from pg_type t2 WHERE t2.typname=t.typname) > 1 AS isdup,
NULL AS collname, NULL as collnspname, att.attrelid,
format_type(t.oid, att.atttypmod) AS fulltype,
CASE WHEN t.typelem > 0 THEN t.typelem ELSE t.oid END as elemoid
FROM pg_attribute att
JOIN pg_type t ON t.oid=atttypid
JOIN pg_namespace nsp ON t.typnamespace=nsp.oid
LEFT OUTER JOIN pg_type b ON t.typelem=b.oid
WHERE att.attrelid = {{typrelid}}::oid
ORDER by attnum;
{% endif %}

{# The SQL given below will fetch enum type#}
{% if type == 'e' %}
SELECT enumlabel
FROM pg_enum
WHERE enumtypid={{tid}}::oid
ORDER by enumsortorder
{% endif %}

{# The SQL given below will fetch range type#}
{% if type == 'r' %}
SELECT rngsubtype, st.typname,
rngcollation, NULL AS collname,
rngsubopc, opc.opcname,
rngcanonical, rngsubdiff
FROM pg_range
LEFT JOIN pg_type st ON st.oid=rngsubtype
LEFT JOIN pg_opclass opc ON opc.oid=rngsubopc
WHERE rngtypid={{tid}}::oid;
{% endif %}
@ -0,0 +1,31 @@
SELECT
t.oid,
t.typname AS name,
FALSE AS is_collatable,
array_to_string(ct.relacl::text[], ', ') AS acl,
t.*,
format_type(t.oid, NULL) AS alias,
pg_get_userbyid(t.typowner) AS typeowner,
e.typname AS element,
description,
ct.oid AS taboid,
nsp.nspname AS schema,
ARRAY [] :: TEXT [] AS seclabels,
(CASE WHEN (t.oid <= {{datlastsysoid}}:: OID OR ct.oid != 0)
THEN TRUE
ELSE FALSE END) AS is_sys_type
FROM pg_type t
LEFT OUTER JOIN pg_type e ON e.oid = t.typelem
LEFT OUTER JOIN pg_class ct ON ct.oid = t.typrelid AND ct.relkind <> 'c'
LEFT OUTER JOIN pg_description des
ON (des.objoid = t.oid AND des.classoid = 'pg_type' :: REGCLASS)
LEFT OUTER JOIN pg_namespace nsp ON nsp.oid = t.typnamespace
WHERE t.typtype != 'd' AND t.typname NOT LIKE E'\\_%' AND
t.typnamespace = {{scid}}:: OID
{% if tid %}
AND t.oid = {{tid}}:: OID
{% endif %}
{% if not show_system_objects %}
AND ct.oid IS NULL
{% endif %}
ORDER BY t.typname;
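The new template above is driven by the scid, tid, show_system_objects and datlastsysoid parameters. A rough sketch of rendering a reduced version of it with plain Jinja2, using invented parameter values; pgadmin itself renders its templates through Flask's render_template rather than this way:

from jinja2 import Template

SQL = Template(
    "SELECT t.oid, t.typname AS name\n"
    "FROM pg_type t\n"
    "WHERE t.typnamespace = {{ scid }}::OID\n"
    "{% if tid %}  AND t.oid = {{ tid }}::OID\n{% endif %}"
    "ORDER BY t.typname;"
)

# All types in a schema (scid is a sample namespace OID):
print(SQL.render(scid=2200, tid=None))
# A single type, when a tid is supplied:
print(SQL.render(scid=2200, tid=16385))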
@ -8,7 +8,7 @@
##########################################################################

"""Schema collection node helper class"""
import re

import json

from flask import render_template
@ -336,56 +336,6 @@ class DataTypeReader:

return type_name

@classmethod
def set_length_precision(cls, length, precision, fulltype, data,
old_data=None):
"""
Parse length & precision from datatype and then assign it to datatype
according to client format

Args:
length: Boolean flag for length
precision: Boolean flag for precision
fulltype: Type name with length & precision
data: New values
old_data: Old values
"""
# If we have length & precision both

if length and precision:
match_obj = re.search(r'(\d+),(\d+)', fulltype)
if match_obj:
attribute_length = DataTypeReader.get_valid_length_value(
data.get('attlen', None))
data['attlen'] = attribute_length or match_obj.group(1)
attribute_precision = DataTypeReader.get_valid_length_value(
data.get('attprecision', None))
data['attprecision'] = attribute_precision or match_obj.group(
2)
elif length:
# If we have length only
match_obj = re.search(r'(\d+)', fulltype)
if match_obj:
attribute_length = DataTypeReader.get_valid_length_value(
data.get('attlen', None))
data['attlen'] = attribute_length or match_obj.group(1)
data['attprecision'] = None
else:
# Use the old values to avoid unnecessary
if old_data:
if 'attlen' in old_data:
if old_data['attlen'] != '-1':
data['attlen'] = old_data.get('attlen', None)
if 'attprecision' in old_data:
if old_data['attprecision'] != '-1':
data['attprecision'] = old_data.get(
'attprecision', None
)

@classmethod
def get_valid_length_value(cls, val):
return val if val and int(val) != -1 else None


def trigger_definition(data):
"""
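A hedged usage sketch of the classmethod above; the import path is assumed from the module context shown in this hunk, and the sample values are invented rather than taken from the commit:

# Hedged usage sketch of DataTypeReader.set_length_precision() as defined
# above; import path assumed, sample values invented.
from pgadmin.browser.server_groups.servers.databases.schemas.utils import \
    DataTypeReader

data = {'attlen': None, 'attprecision': None}
DataTypeReader.set_length_precision(True, True, 'numeric(10,2)', data)
# data is now {'attlen': '10', 'attprecision': '2'}

# A value already supplied by the client wins over the parsed one, and
# -1 is treated as "not set" by get_valid_length_value():
data = {'attlen': '12', 'attprecision': -1}
DataTypeReader.set_length_precision(True, True, 'numeric(10,2)', data)
# data is now {'attlen': '12', 'attprecision': '2'}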
@ -13,7 +13,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -13,7 +13,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -13,7 +13,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -14,7 +14,7 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -135,3 +135,4 @@
{% endfor %}
{% endif %}
{% endif %}
{% endif %}
@ -9,7 +9,7 @@

import json

from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -9,7 +9,7 @@

import uuid

from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -50,6 +50,8 @@ class DatabasesUpdateTestCase(BaseTestGenerator):
follow_redirects=True)
self.assertEquals(response.status_code, 200)
except Exception as exception:
from traceback import print_exc
print_exc()
raise Exception("Error while updating database details. %s" %
exception)
finally:
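The hunk above prints the full traceback before re-raising a readable error. A generic, self-contained sketch of the same pattern outside the test; the failing call is simulated and not part of this commit:

from traceback import print_exc


def simulated_update():
    """Stand-in for the HTTP PUT issued by the test above (hypothetical)."""
    raise ValueError('simulated failure')


try:
    simulated_update()
except Exception as exception:
    # Dump the traceback for debugging, then surface a readable error.
    print_exc()
    raise Exception("Error while updating database details. %s" % exception)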
@ -10,7 +10,7 @@
import json
import uuid

from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from regression.python_test_utils import test_utils as utils

DATABASE_CONNECT_URL = '/browser/database/connect/'
@ -10,7 +10,7 @@
import json
import uuid

from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -9,7 +9,7 @@

import uuid

from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -10,7 +10,7 @@
import json
import uuid

from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -9,7 +9,7 @@

import uuid

from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -20,11 +20,7 @@ CREATE {% if data.rolcanlogin %}USER{% else %}ROLE{% endif %} {{ conn|qtIdent(da

INHERIT{% else %}

NOINHERIT{% endif %}{% if data.rolreplication %}

REPLICATION{% else %}

NOREPLICATION{% endif %}{% if 'rolconnlimit' in data and data.rolconnlimit is number and data.rolconnlimit >= -1 %}
NOINHERIT{% endif %}{% if 'rolconnlimit' in data and data.rolconnlimit is number and data.rolconnlimit >= -1 %}

CONNECTION LIMIT {{ data.rolconnlimit }}{% endif %}{% if data.rolvaliduntil and data.rolvaliduntil is not none %}

@ -42,7 +38,8 @@ GRANT {{ conn|qtIdent(data.admins)|join(', ') }} TO {{ conn|qtIdent(data.rolname

{% for var in data.variables %}

{{ VARIABLE.APPLY(conn, var.database, data.rolname, var.name, var.value) }}
ALTER ROLE {{ self.conn|qtIdent(data.rolname) }}
SET {{ conn|qtIdent(var.name) }} TO {{ var.value }};
{% endfor %}{% endif %}{% if data.description %}

COMMENT ON ROLE {{ conn|qtIdent(data.rolname) }} IS {{ data.description|qtLiteral }};
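The template change above emits one explicit ALTER ROLE ... SET statement per entry in data.variables instead of the VARIABLE.APPLY macro. A rough Python illustration of the statements that loop is meant to produce; the naive quoting only mimics pgadmin's qtIdent filter, and the role and variable values are invented:

variables = [
    {'name': 'search_path', 'value': 'public'},
    {'name': 'work_mem', 'value': "'64MB'"},
]
rolname = 'test_role'

for var in variables:
    # Each variable becomes its own ALTER ROLE ... SET statement.
    print('ALTER ROLE "%s"\n    SET "%s" TO %s;'
          % (rolname, var['name'], var['value']))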
@ -11,6 +11,6 @@ SELECT
FROM
pg_roles r
{% if rid %}
WHERE r.oid = {{ rid|qtIdent }}::OID
WHERE r.oid = {{ rid }}::OID
{% endif %}
ORDER BY r.rolcanlogin, r.rolname
@ -66,7 +66,6 @@ UNION ALL
FROM
(SELECT
'ALTER ROLE ' || pg_catalog.quote_ident(pg_get_userbyid(%(rid)s::OID)) ||
' IN DATABASE ' || pg_catalog.quote_ident(datname) ||
' SET ' || param|| ' TO ' ||
CASE
WHEN param IN ('search_path', 'temp_tablespaces') THEN value
@ -30,9 +30,7 @@ ALTER {% if rolCanLogin %}USER{% else %}ROLE{% endif %} {{ conn|qtIdent(rolname)
INHERIT{% else %}
NOINHERIT{% endif %}{% endif %}{% if 'rolreplication' in data %}

{% if data.rolreplication %}
REPLICATION{% else %}
NOREPLICATION{% endif %}{% endif %}{% if 'rolconnlimit' in data and data.rolconnlimit is number and data.rolconnlimit >= -1 %}
{% endif %}{% if 'rolconnlimit' in data and data.rolconnlimit is number and data.rolconnlimit >= -1 %}

CONNECTION LIMIT {{ data.rolconnlimit }}
{% endif %}{% if 'rolvaliduntil' in data %}
@ -87,7 +85,8 @@ GRANT {{ conn|qtIdent(data.members)|join(', ') }} TO {{ conn|qtIdent(rolname) }}
{% if 'added' in variables and variables.added|length > 0 %}

{% for var in variables.added %}
{{ VARIABLE.APPLY(conn, var.database, rolname, var.name, var.value) }}
ALTER ROLE {{ self.conn|qtIdent(data.rolname) }}
SET {{ conn|qtIdent(var.name) }} TO {{ var.value }};
{% endfor %}{% endif %}
{% if 'changed' in variables and variables.changed|length > 0 %}
@ -9,7 +9,7 @@

import json

from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
@ -11,7 +11,7 @@ from __future__ import print_function

import json

from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
Some files were not shown because too many files have changed in this diff.