Major update to the test suite:

1. Code changes to support the drop-objects functionality.
2. Quoting of database names in the drop-database statements.
3. Fixes for import errors involving the pickle_path and advanced_config variables.
This commit is contained in:
Navnath Gadakh 2016-10-07 13:59:43 +01:00 committed by Dave Page
parent 50658808d9
commit ae612f5403
89 changed files with 3683 additions and 4731 deletions

View File

@ -6,13 +6,15 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression import test_utils as utils
from . import utils as cast_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
class CastsAddTestCase(BaseTestGenerator):
@ -21,45 +23,35 @@ class CastsAddTestCase(BaseTestGenerator):
('Check Cast Node', dict(url='/browser/cast/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
def runTest(self):
""" This function will add cast under database node. """
""" This function will add cast under test database. """
self.server_data = parent_node_dict["database"][-1]
self.server_id = self.server_data["server_id"]
self.db_id = self.server_data['db_id']
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
cast_utils.add_cast(self.tester)
self.data = cast_utils.get_cast_data()
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(
self.db_id) + '/',
data=json.dumps(self.data),
content_type='html/json')
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added cast, database, server and the
'parent_id.pkl' file which is created in setUpClass.
:return: None
"""
cast_utils.delete_cast(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added cast."""
connection = utils.get_db_connection(self.server_data['db_name'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
cast_utils.drop_cast(connection, self.data["srctyp"],
self.data["trgtyp"])
database_utils.disconnect_database(self, self.server_id,
self.db_id)

View File

@ -1,102 +1,66 @@
# #################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
# ##################################################################
# #
# # pgAdmin 4 - PostgreSQL Tools
# #
# # Copyright (C) 2013 - 2016, The pgAdmin Development Team
# # This software is released under the PostgreSQL Licence
# #
# # ##################################################################
from __future__ import print_function
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression.test_utils import get_ids
from regression import parent_node_dict
from . import utils as cast_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from regression.test_setup import advanced_config_data
import json
class CastsDeleteTestCase(BaseTestGenerator):
""" This class will fetch the cast node added under database node. """
""" This class will delete the cast node added under database node. """
scenarios = [
# Fetching default URL for cast node.
('Check Cast Node', dict(url='/browser/cast/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add cast(s) to databases
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add cast(s) to database(s)
cast_utils.add_cast(cls.tester)
def setUp(self):
self.default_db = self.server["db"]
self.database_info = parent_node_dict['database'][-1]
self.db_name = self.database_info['db_name']
self.server["db"] = self.db_name
self.source_type = 'circle'
self.target_type = 'line'
self.cast_id = cast_utils.create_cast(self.server, self.source_type,
self.target_type)
def runTest(self):
""" This function will delete added cast(s)."""
all_id = get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
cast_ids_dict = all_id["cid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
""" This function will delete added cast."""
self.server_id = self.database_info["server_id"]
self.db_id = self.database_info['db_id']
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id,
db_id)
if len(db_con) == 0:
raise Exception("No database(s) to delete for server id %s"
% server_id)
cast_id = cast_ids_dict[server_id]
cast_get_data = cast_utils.verify_cast(self.tester,
utils.SERVER_GROUP,
server_id,
db_id, cast_id)
if cast_get_data.status_code == 200:
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
connection = utils.get_db_connection(self.server['db'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
response = cast_utils.verify_cast(connection, self.source_type,
self.target_type)
if len(response) == 0:
raise Exception("Could not find cast.")
delete_response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(cast_id),
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(self.cast_id),
follow_redirects=True)
response_data = json.loads(delete_response.data.decode('utf-8'))
self.assertTrue(response_data['success'], 1)
self.assertEquals(delete_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function delete the added cast, database, server and the
'parent_id.pkl' file which is created in setUpClass.
:return: None
"""
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function will disconnect test database."""
database_utils.disconnect_database(self, self.server_id,
self.db_id)
self.server['db'] = self.default_db

View File

@ -7,81 +7,61 @@
#
###################################################################
from __future__ import print_function
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression.test_utils import get_ids
from regression import parent_node_dict
from . import utils as cast_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
import os
import sys
class CastsGetTestCase(BaseTestGenerator):
""" This class will fetch the cast node added under database node. """
scenarios = [
# Fetching default URL for cast node.
('Check Cast Node', dict(url='/browser/cast/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function used to add the sever, database, and cast
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
cast_utils.add_cast(cls.tester)
def setUp(self):
""" This function will create cast."""
self.default_db = self.server["db"]
self.database_info = parent_node_dict['database'][-1]
self.db_name = self.database_info['db_name']
self.server["db"] = self.db_name
self.source_type = 'money'
self.target_type = 'bigint'
self.cast_id = cast_utils.create_cast(self.server, self.source_type,
self.target_type)
def runTest(self):
""" This function will get added cast."""
all_id = get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
cast_ids_dict = all_id["cid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
""" This function will fetch added cast."""
self.server_id = self.database_info["server_id"]
self.db_id = self.database_info['db_id']
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id,
db_id)
if len(db_con) == 0:
raise Exception("No database(s) to delete for server id %s"
% server_id)
cast_id = cast_ids_dict[server_id]
cast_get_data = cast_utils.verify_cast(self.tester,
utils.SERVER_GROUP,
server_id,
db_id, cast_id)
self.assertEquals(cast_get_data.status_code, 200)
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' + str(
self.server_id) + '/' +
str(self.db_id) + '/' + str(self.cast_id),
content_type='html/json')
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added cast, database, server and the
'parent_id.pkl' file which is created in setup() function.
:return: None
"""
cast_utils.delete_cast(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added cast."""
connection = utils.get_db_connection(self.server['db'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
cast_utils.drop_cast(connection, self.source_type,
self.target_type)
database_utils.disconnect_database(self, self.server_id,
self.db_id)
self.server['db'] = self.default_db

View File

@ -6,102 +6,76 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression.test_utils import get_ids
from regression import parent_node_dict
from . import utils as cast_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from regression.test_setup import advanced_config_data
import json
class CastsPutTestCase(BaseTestGenerator):
""" This class will fetch the cast node added under database node. """
scenarios = [
# Fetching default URL for cast node.
('Check Cast Node', dict(url='/browser/cast/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add cast(s) to databases
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
cast_utils.add_cast(cls.tester)
def setUp(self):
""" This function will create cast."""
self.default_db = self.server["db"]
self.database_info = parent_node_dict['database'][-1]
self.db_name = self.database_info['db_name']
self.server["db"] = self.db_name
self.source_type = 'character'
self.target_type = 'cidr'
self.cast_id = cast_utils.create_cast(self.server, self.source_type,
self.target_type)
def runTest(self):
""" This function will update added cast."""
all_id = get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
cast_ids_dict = all_id["cid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
self.server_id = self.database_info["server_id"]
self.db_id = self.database_info['db_id']
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id,
db_id)
if len(db_con) == 0:
raise Exception("No database(s) to delete for server id %s"
% server_id)
cast_id = cast_ids_dict[server_id]
cast_get_data = cast_utils.verify_cast(self.tester,
utils.SERVER_GROUP,
server_id,
db_id, cast_id)
if cast_get_data.status_code == 200:
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
connection = utils.get_db_connection(self.server['db'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
response = cast_utils.verify_cast(connection, self.source_type,
self.target_type)
if len(response) == 0:
raise Exception("Could not find cast.")
data = {
"description": advanced_config_data["cast_update_data"]
["comment"],
"id": cast_id
"description": "This is cast update comment",
"id": self.cast_id
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(
db_id) +
'/' + str(cast_id),
str(self.server_id) + '/' + str(
self.db_id) +
'/' + str(self.cast_id),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added cast, database, server and the
'parent_id.pkl' file which is created in setUpClass.
:return: None
"""
cast_utils.delete_cast(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added cast."""
connection = utils.get_db_connection(self.server['db'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
cast_utils.drop_cast(connection, self.source_type,
self.target_type)
database_utils.disconnect_database(self, self.server_id,
self.db_id)
self.server['db'] = self.default_db

View File

@ -6,140 +6,96 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import sys
import traceback
import os
import pickle
import json
from regression.test_setup import advanced_config_data, pickle_path
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
CAST_URL = '/browser/cast/obj/'
from regression.test_utils import get_db_connection
def get_cast_config_data(server_connect_data):
adv_config_data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in advanced_config_data['casts_credentials']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
def get_cast_data():
data = {
"castcontext": adv_config_data
['cast_context'],
"encoding": adv_config_data
['encoding'],
"name": adv_config_data
['name'],
"srctyp": adv_config_data
['source_type'],
"trgtyp": adv_config_data
['target_type']
"castcontext": "IMPLICIT",
"encoding": "UTF8",
"name": "money->bigint",
"srctyp": "money",
"trgtyp": "bigint",
}
return data
def add_cast(tester):
def create_cast(server, source_type, target_type):
"""
This function add the cast in the existing database
:param tester: test object
:type tester: flask test object
:return:None
This function add a cast into database
:param server: server details
:type server: dict
:param source_type: source type for cast to be added
:type source_type: str
:param target_type: target type for cast to be added
:type target_type: str
:return cast id
:rtype: int
"""
try:
connection = get_db_connection(server['db'],
server['username'],
server['db_password'],
server['host'],
server['port'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
pg_cursor.execute("CREATE CAST (%s AS %s) WITHOUT"
" FUNCTION AS IMPLICIT" % (source_type, target_type))
connection.set_isolation_level(old_isolation_level)
connection.commit()
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
server_group = utils.config_data['server_group']
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, server_group,
server_id, db_id)
if db_con['data']['connected']:
server_connect_response = server_utils.verify_server(
tester, server_group, server_id)
data = get_cast_config_data(server_connect_response)
response = tester.post(CAST_URL + str(server_group) + '/' +
str(server_id) + '/' + str(
db_id) + '/',
data=json.dumps(data),
content_type='html/json')
assert response.status_code == 200
response_data = json.loads(response.data.decode('utf-8'))
write_cast_info(response_data, server_id)
# Get 'oid' from newly created cast
pg_cursor.execute(
"SELECT ca.oid FROM pg_cast ca WHERE ca.castsource = "
"(SELECT t.oid FROM pg_type t WHERE format_type(t.oid, NULL)='%s') "
"AND ca.casttarget = (SELECT t.oid FROM pg_type t WHERE "
"format_type(t.oid, NULL) = '%s')" % (source_type, target_type))
oid = pg_cursor.fetchone()
cast_id = ''
if oid:
cast_id = oid[0]
connection.close()
return cast_id
except Exception:
traceback.print_exc(file=sys.stderr)
def write_cast_info(response_data, server_id):
"""
This function writes the server's details to file parent_id.pkl
:param response_data: server's data
:type response_data: list of dictionary
:param pickle_id_dict: contains ids of server,database,tables etc.
:type pickle_id_dict: dict
:return: None
"""
cast_id = response_data['node']['_id']
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'cid' in pickle_id_dict:
if pickle_id_dict['cid']:
# Add the cast_id as value in dict
pickle_id_dict["cid"][0].update({server_id: cast_id})
else:
# Create new dict with server_id and cast_id
pickle_id_dict["cid"].append({server_id: cast_id})
cast_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, cast_output)
cast_output.close()
def verify_cast(connection, source_type, target_type):
""" This function will verify current cast."""
try:
pg_cursor = connection.cursor()
pg_cursor.execute(
"SELECT * FROM pg_cast ca WHERE ca.castsource = "
"(SELECT t.oid FROM pg_type t WHERE format_type(t.oid, NULL)='%s') "
"AND ca.casttarget = (SELECT t.oid FROM pg_type t WHERE "
"format_type(t.oid, NULL) = '%s')" % (source_type, target_type))
casts = pg_cursor.fetchall()
connection.close()
return casts
except Exception:
traceback.print_exc(file=sys.stderr)
def verify_cast(tester, server_group, server_id, db_id, cast_id):
def drop_cast(connection, source_type, target_type):
"""This function used to drop the cast"""
cast_response = tester.get(CAST_URL + str(server_group) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(cast_id),
content_type='html/json')
return cast_response
def delete_cast(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
cast_ids_dict = all_id["cid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id,
db_id)
if len(db_con) == 0:
raise Exception("No database(s) to delete for server id %s"
% server_id)
cast_id = cast_ids_dict[server_id]
cast_get_data = verify_cast(tester, utils.SERVER_GROUP,
server_id,
db_id, cast_id)
if cast_get_data.status_code == 200:
delete_response = tester.delete(
CAST_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(cast_id),
follow_redirects=True)
return delete_response
try:
pg_cursor = connection.cursor()
pg_cursor.execute(
"SELECT * FROM pg_cast ca WHERE ca.castsource = "
"(SELECT t.oid FROM pg_type t WHERE format_type(t.oid, NULL)='%s') "
"AND ca.casttarget = (SELECT t.oid FROM pg_type t WHERE "
"format_type(t.oid, NULL) = '%s')" % (source_type, target_type))
if pg_cursor.fetchall():
pg_cursor.execute(
"DROP CAST (%s AS %s) CASCADE" % (source_type, target_type))
connection.commit()
connection.close()
except Exception:
traceback.print_exc(file=sys.stderr)

View File

@ -7,73 +7,73 @@
#
# ##################################################################
import uuid
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \
import utils as func_utils
from . import utils as event_trigger_utils
from regression import test_utils as utils
from regression import parent_node_dict
from regression import trigger_funcs_utils
class EventTriggerAddTestCase(BaseTestGenerator):
""" This class will add new event trigger under schema node. """
""" This class will add new event trigger under test schema. """
scenarios = [
# Fetching default URL for event trigger node.
('Fetch Event Trigger Node URL',
dict(url='/browser/event_trigger/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add trigger function(s) to schema(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
func_utils.add_trigger_function(cls.tester, cls.server_connect_response,
cls.server_ids)
def setUp(self):
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.schema_name = self.schema_data['schema_name']
self.schema_id = self.schema_data['schema_id']
self.extension_name = "postgres_fdw"
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.func_name = "trigger_func_%s" % str(uuid.uuid4())[1:6]
self.db_user = self.server["username"]
self.function_info = trigger_funcs_utils.create_trigger_function(
self.server, self.db_name, self.schema_name, self.func_name)
def runTest(self):
""" This function will add event trigger under database node. """
event_trigger_utils.add_event_trigger(self.tester)
""" This function will add event trigger under test database. """
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database.")
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema.")
func_name = self.function_info[1]
func_response = trigger_funcs_utils.verify_trigger_function(
self.server,
self.db_name,
func_name)
if not func_response:
raise Exception("Could not find the trigger function.")
data = {
"enabled": "O",
"eventfunname": "%s.%s" % (self.schema_name, self.func_name),
"eventname": "DDL_COMMAND_END",
"eventowner": self.db_user,
"name": "event_trigger_add_%s" % (str(uuid.uuid4())[1:6]),
"providers": []
}
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) +
'/', data=json.dumps(data),
content_type='html/json')
self.assertAlmostEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
:return: None
"""
event_trigger_utils.delete_event_trigger(cls.tester)
func_utils.delete_trigger_function(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -7,82 +7,76 @@
#
# ##################################################################
import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \
import utils as func_utils
from regression import test_utils as utils
from regression import parent_node_dict
from regression import trigger_funcs_utils
from . import utils as event_trigger_utils
import json
class EventTriggerDeleteTestCase(BaseTestGenerator):
""" This class will fetch added event trigger under database node. """
""" This class will delete added event trigger under test database. """
scenarios = [
# Fetching default URL for event trigger node.
('Fetch Event Trigger Node URL',
dict(url='/browser/event_trigger/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add trigger function(s) to schema(s)
5. Add event trigger(s) to database(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
func_utils.add_trigger_function(cls.tester, cls.server_connect_response,
cls.server_ids)
event_trigger_utils.add_event_trigger(cls.tester)
def setUp(self):
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.schema_name = self.schema_data['schema_name']
self.schema_id = self.schema_data['schema_id']
self.extension_name = "postgres_fdw"
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.db_user = self.server["username"]
self.func_name = "trigger_func_%s" % str(uuid.uuid4())[1:6]
self.trigger_name = "event_trigger_delete_%s" % (
str(uuid.uuid4())[1:6])
self.function_info = trigger_funcs_utils.create_trigger_function(
self.server, self.db_name, self.schema_name, self.func_name)
self.event_trigger_id = event_trigger_utils.create_event_trigger(
self.server, self.db_name, self.schema_name, self.func_name,
self.trigger_name)
def runTest(self):
""" This function will delete event trigger under database node. """
""" This function will delete event trigger under test database. """
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database.")
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema.")
func_name = self.function_info[1]
func_response = trigger_funcs_utils.verify_trigger_function(
self.server,
self.db_name,
func_name)
if not func_response:
raise Exception("Could not find the trigger function.")
trigger_response = event_trigger_utils.verify_event_trigger(
self.server, self.db_name,
self.trigger_name)
if not trigger_response:
raise Exception("Could not find event trigger.")
del_response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' +
str(self.db_id) + '/' +
str(self.event_trigger_id),
follow_redirects=True)
self.assertEquals(del_response.status_code, 200)
del_response = event_trigger_utils.delete_event_trigger(self.tester)
del_respdata = json.loads(del_response.data.decode("utf-8"))
self.assertTrue(del_respdata['success'], 1)
@classmethod
def tearDownClass(cls):
"""
This function delete the added schema, database, server and parent
id file
:return: None
"""
func_utils.delete_trigger_function(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -7,91 +7,70 @@
#
# ##################################################################
import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \
import utils as func_utils
from regression import test_utils as utils
from regression import parent_node_dict
from regression import trigger_funcs_utils
from . import utils as event_trigger_utils
class EventTriggerGetTestCase(BaseTestGenerator):
""" This class will fetch added event trigger under schema node. """
""" This class will fetch added event trigger under test database. """
scenarios = [
# Fetching default URL for event trigger node.
('Fetch Event Trigger Node URL',
dict(url='/browser/event_trigger/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add trigger function(s) to schema(s)
5. Add event trigger(s) to database(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
func_utils.add_trigger_function(cls.tester, cls.server_connect_response,
cls.server_ids)
event_trigger_utils.add_event_trigger(cls.tester)
def setUp(self):
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.schema_name = self.schema_data['schema_name']
self.schema_id = self.schema_data['schema_id']
self.extension_name = "postgres_fdw"
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.db_user = self.server["username"]
self.func_name = "trigger_func_%s" % str(uuid.uuid4())[1:6]
self.trigger_name = "event_trigger_get_%s" % (str(uuid.uuid4())[1:6])
self.function_info = trigger_funcs_utils.create_trigger_function(
self.server, self.db_name, self.schema_name, self.func_name)
self.event_trigger_id = event_trigger_utils.create_event_trigger(
self.server, self.db_name, self.schema_name, self.func_name,
self.trigger_name)
def runTest(self):
""" This function will fetch event trigger under database node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
event_trigger_ids_dict = all_id["etid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
event_trigger_id = event_trigger_ids_dict[server_id]
response = event_trigger_utils.verify_event_trigger(
self.tester, utils.SERVER_GROUP, server_id, db_id,
event_trigger_id)
""" This function will fetch added event trigger under test database.
"""
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database.")
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema.")
func_name = self.function_info[1]
func_response = trigger_funcs_utils.verify_trigger_function(
self.server,
self.db_name,
func_name)
if not func_response:
raise Exception("Could not find the trigger function.")
response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/'
+ str(self.server_id) + '/' + str(self.db_id) + '/' +
str(self.event_trigger_id),
content_type='html/json')
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
:return: None
"""
event_trigger_utils.delete_event_trigger(cls.tester)
func_utils.delete_trigger_function(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,109 +6,80 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \
import utils as func_utils
from regression import test_utils as utils
from regression import parent_node_dict
from regression import trigger_funcs_utils
from . import utils as event_trigger_utils
import json
from regression.test_setup import advanced_config_data
class EventTriggerPutTestCase(BaseTestGenerator):
""" This class will fetch added event trigger under database node. """
""" This class will fetch added event trigger under test database. """
scenarios = [
# Fetching default URL for event trigger node.
('Fetch Event Trigger Node URL',
dict(url='/browser/event_trigger/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add trigger function(s) to schema(s)
5. Add event trigger(s) to database(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
func_utils.add_trigger_function(cls.tester, cls.server_connect_response,
cls.server_ids)
event_trigger_utils.add_event_trigger(cls.tester)
def setUp(self):
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.schema_name = self.schema_data['schema_name']
self.schema_id = self.schema_data['schema_id']
self.extension_name = "postgres_fdw"
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.db_user = self.server["username"]
self.func_name = "trigger_func_%s" % str(uuid.uuid4())[1:6]
self.trigger_name = "event_trigger_put_%s" % (str(uuid.uuid4())[1:6])
self.function_info = trigger_funcs_utils.create_trigger_function(
self.server, self.db_name, self.schema_name, self.func_name)
self.event_trigger_id = event_trigger_utils.create_event_trigger(
self.server, self.db_name, self.schema_name, self.func_name,
self.trigger_name)
def runTest(self):
""" This function will update event trigger under database node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
event_trigger_ids_dict = all_id["etid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
event_trigger_id = event_trigger_ids_dict[server_id]
response = event_trigger_utils.verify_event_trigger(
self.tester, utils.SERVER_GROUP, server_id, db_id,
event_trigger_id)
if response.status_code == 200:
data = \
{
"comment":
advanced_config_data['event_trigger_update_data']
['comment'],
"id": event_trigger_id
""" This function will update event trigger under test database. """
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database.")
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema.")
func_name = self.function_info[1]
func_response = trigger_funcs_utils.verify_trigger_function(
self.server,
self.db_name,
func_name)
if not func_response:
raise Exception("Could not find the trigger function.")
trigger_response = event_trigger_utils.verify_event_trigger(
self.server, self.db_name, self.trigger_name)
if not trigger_response:
raise Exception("Could not find event trigger.")
data = {
"comment": "This is event trigger update comment",
"id": self.event_trigger_id
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(event_trigger_id),
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(self.event_trigger_id),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)
:return: None
"""
event_trigger_utils.delete_event_trigger(cls.tester)
func_utils.delete_trigger_function(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()

View File

@ -6,143 +6,82 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import sys
import traceback
import os
import pickle
import json
import uuid
from regression.test_setup import advanced_config_data, pickle_path
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \
import utils as func_utils
EVENT_TRIGGER_URL = '/browser/event_trigger/obj/'
from regression.test_utils import get_db_connection
def get_event_trigger_config_data(schema_name, server_connect_data,
trigger_func_name):
adv_config_data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in advanced_config_data['event_trigger_credentials']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
data = {
"enabled": adv_config_data['enable'],
"eventfunname": "{0}.{1}".format(schema_name, trigger_func_name),
"eventname": adv_config_data['event_name'],
"eventowner": adv_config_data['owner'],
"name": "event_trigger_{}".format(str(uuid.uuid4())[1:4]),
"providers": adv_config_data['provider']
}
return data
def add_event_trigger(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_info_dict = all_id["scid"][0]
trigger_func_info_dict = all_id["tfnid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
server_connect_response = server_utils.verify_server(
tester, utils.SERVER_GROUP, server_id)
schema_info = schema_info_dict[int(server_id)]
trigger_func_list = trigger_func_info_dict[int(server_id)]
trigger_func_info = \
filter(lambda x: x[2] == "event_trigger", trigger_func_list)[0]
trigger_func_name = trigger_func_info[1].replace("()", "")
trigger_func_id = trigger_func_info[0]
trigger_func_response = \
func_utils.verify_trigger_function(tester, server_id,
db_id, schema_info[0],
trigger_func_id)
if trigger_func_response.status_code == 200:
data = get_event_trigger_config_data(
schema_info[1],
server_connect_response, trigger_func_name)
response = tester.post(
EVENT_TRIGGER_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/', data=json.dumps(data),
content_type='html/json')
assert response.status_code == 200
response_data = json.loads(response.data.decode('utf-8'))
write_event_trigger_info(response_data, server_id)
def write_event_trigger_info(response_data, server_id):
def create_event_trigger(server, db_name, schema_name, func_name,
trigger_name):
"""
This function writes the schema id into parent_id.pkl
:param response_data: extension add response data
:type response_data: dict
:param server_id: server id
:type server_id: str
:return: None
This function creates the event trigger into test database.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param schema_name: schema name
:type schema_name: str
:param func_name: function name
:type func_name: str
:param trigger_name: trigger name
:type trigger_name: str
:return trigger_id: trigger id
:rtype: int
"""
event_trigger_id = response_data['node']['_id']
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'etid' in pickle_id_dict:
if pickle_id_dict['etid']:
# Add the event_trigger_id as value in dict
pickle_id_dict["etid"][0].update({server_id: event_trigger_id})
else:
# Create new dict with server_id and event_trigger_id
pickle_id_dict["etid"].append({server_id: event_trigger_id})
event_trigger_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, event_trigger_output)
event_trigger_output.close()
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
pg_cursor.execute('''CREATE EVENT TRIGGER "%s" ON DDL_COMMAND_END
EXECUTE PROCEDURE "%s"."%s"()''' % (trigger_name, schema_name,
func_name))
connection.set_isolation_level(old_isolation_level)
connection.commit()
# Get 'oid' from newly created event trigger
pg_cursor.execute(
"SELECT oid FROM pg_event_trigger WHERE evtname = '%s'"
% trigger_name)
oid = pg_cursor.fetchone()
trigger_id = ''
if oid:
trigger_id = oid[0]
connection.close()
return trigger_id
except Exception:
traceback.print_exc(file=sys.stderr)
def verify_event_trigger(tester, server_group, server_id, db_id,
event_trigger_id):
response = tester.get(EVENT_TRIGGER_URL + str(server_group) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(event_trigger_id),
content_type='html/json')
return response
def delete_event_trigger(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
event_trigger_ids_dict = all_id["etid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
event_trigger_id = event_trigger_ids_dict[server_id]
response = verify_event_trigger(tester,
utils.SERVER_GROUP,
server_id,
db_id,
event_trigger_id)
if response.status_code == 200:
del_response = tester.delete(
EVENT_TRIGGER_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' +
str(db_id) + '/' +
str(event_trigger_id),
follow_redirects=True)
return del_response
def verify_event_trigger(server, db_name, trigger_name):
"""
This function verifies the event trigger is present in the database
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param trigger_name: trigger name to be verified
:type trigger_name: str
:return event_trigger: event trigger's details
:rtype event_trigger: tuple
"""
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute(
"SELECT oid FROM pg_event_trigger WHERE evtname = '%s'"
% trigger_name)
event_trigger = pg_cursor.fetchone()
connection.close()
return event_trigger
except Exception:
traceback.print_exc(file=sys.stderr)

View File

@ -6,67 +6,49 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression import test_utils as utils
from . import utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import\
utils as schema_utils
from . import utils as extension_utils
class ExtensionsAddTestCase(BaseTestGenerator):
scenarios = [
# Fetching default URL for extension node.
('Check Extension Node', dict(url='/browser/extension/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
def runTest(self):
""" This function will add extension under 1st server of tree node. """
""" This function will add extension under test schema. """
self.schema_data = parent_node_dict["schema"][-1]
self.server_id = self.schema_data["server_id"]
self.db_id = self.schema_data['db_id']
self.schema_name = self.schema_data['schema_name']
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
extension_utils.add_extensions(self.tester)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
self.data = extension_utils.get_extension_data(self.schema_name)
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(
self.db_id) + '/',
data=json.dumps(self.data),
content_type='html/json')
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
:return: None
def tearDown(self):
"""This function disconnect the test database and drop added extension.
"""
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
db_name = parent_node_dict["database"][-1]['db_name']
extension_utils.drop_extension(self.server, db_name, self.data['name'])
database_utils.disconnect_database(self, self.server_id,
self.db_id)

View File

@ -1,3 +1,11 @@
# # #################################################################
# #
# # pgAdmin 4 - PostgreSQL Tools
# #
# # Copyright (C) 2013 - 2016, The pgAdmin Development Team
# # This software is released under the PostgreSQL Licence
# #
# # ##################################################################
# #################################################################
#
# pgAdmin 4 - PostgreSQL Tools
@ -6,69 +14,52 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from . import utils as extension_utils
class ExtensionsDeleteTestCase(BaseTestGenerator):
scenarios = [
# Fetching default URL for extension node.
('Check Extension Node', dict(url='/browser/extension/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s) to database(s)
schema_utils.add_schemas(cls.tester)
# Add extension(s) to schema(s)
extension_utils.add_extensions(cls.tester)
def setUp(self):
""" This function will create extension."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
def runTest(self):
""" This function will add extension under 1st server of tree node. """
""" This function will delete extension added test database. """
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
response = extension_utils.verify_extension(self.server, self.db_name,
self.extension_name)
if not response:
raise Exception("Could not find extension.")
delete_response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(self.extension_id),
follow_redirects=True)
self.assertEquals(delete_response.status_code, 200)
delete_respdata = extension_utils.delete_extension(self.tester)
self.assertTrue(delete_respdata['success'], 1)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
"""
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database. """
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -1,87 +1,57 @@
#################################################################
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
# #################################################################
#
# # pgAdmin 4 - PostgreSQL Tools
# #
# # Copyright (C) 2013 - 2016, The pgAdmin Development Team
# # This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from . import utils as extension_utils
class ExtensionsGetTestCase(BaseTestGenerator):
scenarios = [
# Fetching default URL for extension node.
('Check Extension Node', dict(url='/browser/extension/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
extension_utils.add_extensions(cls.tester)
def setUp(self):
""" This function will create extension."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
def runTest(self):
""" This function will add extension under 1st server of tree node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
extension_ids_dict = all_id["eid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
extension_id = extension_ids_dict[server_id]
response = extension_utils.verify_extensions(self.tester,
""" This function will fetch added extension under database name. """
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id, db_id,
extension_id)
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' + str(
self.server_id) + '/' +
str(self.db_id) + '/' + str(self.extension_id),
content_type='html/json')
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
"""
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added
extension."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id,
self.db_id)

View File

@ -6,99 +6,62 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from . import utils as extension_utils
class ExtensionsPutTestCase(BaseTestGenerator):
scenarios = [
# Fetching default URL for extension node.
('Check Extension Node', dict(url='/browser/extension/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
extension_utils.add_extensions(cls.tester)
def setUp(self):
""" This function will create extension."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
def runTest(self):
""" This function will add extension under 1st server of tree node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
extension_ids_dict = all_id["eid"][0]
schema_info_dict = all_id["scid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
extension_id = extension_ids_dict[server_id]
response = extension_utils.verify_extensions(self.tester,
""" This function will update extension added under test database. """
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id, db_id,
extension_id)
if response.status_code == 200:
schema_name = schema_info_dict[int(server_id)][1]
data = \
{
"id": extension_id,
"schema": schema_name
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
response = extension_utils.verify_extension(self.server, self.db_name,
self.extension_name)
if not response:
raise Exception("Could not find extension.")
data = {
"schema": "public",
"id": self.extension_id
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(extension_id),
str(self.server_id) + '/' + str(
self.db_id) +
'/' + str(self.extension_id),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
"""
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added
extension."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,129 +6,119 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import sys
import traceback
from regression.test_utils import get_db_connection
import os
import pickle
import json
from regression.test_setup import advanced_config_data, pickle_path
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
EXTENSION_URL = '/browser/extension/obj/'
def get_extension_config_data(schema_name, server_connect_data):
adv_config_data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in advanced_config_data['extension_credentials']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
def get_extension_data(schema_name):
data = {
"name": adv_config_data['name'],
"relocatable": adv_config_data['relocate'],
"name": "postgres_fdw",
"relocatable": "true",
"schema": schema_name,
"version": adv_config_data['version']
"version": "1.0"
}
return data
def write_extension_info(response_data, server_id):
def create_extension(server, db_name, extension_name, schema_name):
"""
This function writes the schema id into parent_id.pkl
This function used to create extension under the existing dummy database
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param extension_name: extension name to be added
:type extension_name: str
:param schema_name: schema name
:type schema_name: str
:return extension_id: extension id
:rtype: int
"""
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
pg_cursor.execute(
'''CREATE EXTENSION "%s" SCHEMA "%s"''' % (extension_name,
schema_name))
connection.set_isolation_level(old_isolation_level)
connection.commit()
# Get 'oid' from newly created extension
pg_cursor.execute(
"SELECT oid FROM pg_extension WHERE extname = '%s'" %
extension_name)
oid = pg_cursor.fetchone()
extension_id = ''
if oid:
extension_id = oid[0]
connection.close()
return extension_id
except Exception:
traceback.print_exc(file=sys.stderr)
:param response_data: extension add response data
:type response_data: dict
:param server_id: server id
:type server_id: str
def verify_extension(server, db_name, extension_name):
"""
This function will verify current extension.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param extension_name: extension name to be added
:type extension_name: str
:return extension: extension detail
:rtype: tuple
"""
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute(
"select * from pg_extension where extname='%s'" % extension_name)
extension = pg_cursor.fetchone()
connection.close()
return extension
except Exception:
traceback.print_exc(file=sys.stderr)
def drop_extension(server, db_name, extension_name):
"""
This function used to drop the extension.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param extension_name: extension name
:type extension_name: str
:return: None
"""
extension_id = response_data['node']['_id']
# schema_name = str(response_data['node']['label'])
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'eid' in pickle_id_dict:
if pickle_id_dict['eid']:
# Add the extension_id as value in dict
pickle_id_dict["eid"][0].update({server_id: extension_id})
else:
# Create new dict with server_id and extension_id
pickle_id_dict["eid"].append({server_id: extension_id})
extension_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, extension_output)
extension_output.close()
def add_extensions(tester):
    """
    Add an extension (via the extension node REST API) to every
    connected test database and record the new extension ids.

    :param tester: test client used to issue HTTP requests
    :return: None
    """
    # Ids of the servers/databases/schemas created by earlier setup
    # steps, read back from the pickled parent-id file.
    all_id = utils.get_ids()
    server_ids = all_id["sid"]
    db_ids_dict = all_id["did"][0]
    schema_info_dict = all_id["scid"][0]
    for server_id in server_ids:
        db_id = db_ids_dict[int(server_id)]
        db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
                                                server_id, db_id)
        if db_con['data']["connected"]:
            server_connect_response = server_utils.verify_server(
                tester, utils.SERVER_GROUP, server_id)
            # Index 1 presumably holds the schema name - TODO confirm
            # against the helper that populated schema_info_dict.
            schema_name = schema_info_dict[int(server_id)][1]
            data = get_extension_config_data(schema_name,
                                             server_connect_response)
            response = tester.post(
                EXTENSION_URL + str(utils.SERVER_GROUP) + '/' +
                str(server_id) + '/' + str(
                    db_id) + '/',
                data=json.dumps(data),
                content_type='html/json')
            assert response.status_code == 200
            response_data = json.loads(response.data.decode('utf-8'))
            # Persist the new extension id so later teardown can find it.
            write_extension_info(response_data, server_id)
def verify_extensions(tester, server_group, server_id, db_id, extension_id):
    """Issue a GET for the given extension node and return the raw
    response object."""
    node_url = "{0}{1}/{2}/{3}/{4}".format(
        EXTENSION_URL, server_group, server_id, db_id, extension_id)
    return tester.get(node_url, content_type='html/json')
def delete_extension(tester):
    """
    Delete a previously added extension through the REST API.

    NOTE(review): the function returns from inside the loop, so only the
    extension of the first server whose node GET succeeds is deleted.

    :param tester: test client used to issue HTTP requests
    :return: decoded JSON body of the delete response, or None when no
        extension node could be fetched
    """
    all_id = utils.get_ids()
    server_ids = all_id["sid"]
    db_ids_dict = all_id["did"][0]
    extension_ids_dict = all_id["eid"][0]
    for server_id in server_ids:
        db_id = db_ids_dict[int(server_id)]
        extension_id = extension_ids_dict[server_id]
        # Make sure the node still exists before attempting the delete.
        response = verify_extensions(tester,
                                     utils.SERVER_GROUP,
                                     server_id, db_id,
                                     extension_id)
        if response.status_code == 200:
            delete_response = tester.delete(
                EXTENSION_URL + str(utils.SERVER_GROUP) + '/' +
                str(server_id) + '/' +
                str(db_id) + '/' +
                str(extension_id),
                follow_redirects=True)
            delete_respdata = json.loads(delete_response.data.decode())
            return delete_respdata
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute(
"SELECT * FROM pg_extension WHERE extname='%s'"
% extension_name)
if pg_cursor.fetchall():
pg_cursor.execute(
"DROP EXTENSION %s CASCADE" % extension_name)
connection.commit()
connection.close()
except Exception:
traceback.print_exc(file=sys.stderr)

View File

@ -6,83 +6,100 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import uuid
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests \
import utils as fdw_utils
from . import utils as fsrv_utils
from regression import parent_node_dict
from regression import test_utils as utils
class ForeignServerAddTestCase(BaseTestGenerator):
"""
This class will add foreign server under database node.
"""
scenarios = [
# Fetching default URL for foreign server node.
('Check FSRV Node', dict(url='/browser/foreign_server/obj/'))
]
@classmethod
def setUpClass(cls):
""""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
5. Add foreign data wrapper(s) to extension(s)
:return: None"
"""
# Add the server(s)
server_utils.add_server(cls.tester)
# Connect to server(s)
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database(s) to connected server(s)
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s) under connected database(s)
schema_utils.add_schemas(cls.tester)
# Add extension(s) to schema(s)
extension_utils.add_extensions(cls.tester)
# Add foreign data wrapper(s) to extension(s)
fdw_utils.add_fdw(cls.tester)
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "fdw_{0}".format(str(uuid.uuid4())[1:6])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
def runTest(self):
""" This function will add foreign server under database node. """
"""This function will fetch foreign data wrapper present under test
database."""
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
db_user = self.server["username"]
data = {
"fsrvacl": [
{
"grantee": db_user,
"grantor": db_user,
"privileges":
[
{
"privilege_type": "U",
"privilege": "true",
"with_grant": "false"
}
]
}
],
"fsrvoptions": [
{
"fsrvoption": "host",
"fsrvvalue": self.server['host']
},
{
"fsrvoption": "port",
"fsrvvalue": str(self.server['port'])
},
{
"fsrvoption": "dbname",
"fsrvvalue": self.db_name
}
],
"fsrvowner": db_user,
"name": "test_fsrv_add_%s" % (str(uuid.uuid4())[1:6])
}
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) + '/'
+ str(self.fdw_id) + '/', data=json.dumps(data),
content_type='html/json')
self.assertEquals(response.status_code, 200)
fsrv_utils.add_fsrv(self.tester)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
:return: None
"""
fsrv_utils.delete_fsrv(cls.tester)
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added foreign
data wrapper."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,89 +6,73 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests \
import utils as fdw_utils
from regression import parent_node_dict
from regression import test_utils as utils
from . import utils as fsrv_utils
class ForeignServerDeleteTestCase(BaseTestGenerator):
"""
This class will add foreign server under FDW node.
"""
"""This class will add foreign server under FDW node."""
scenarios = [
# Fetching default URL for foreign server node.
('Check FSRV Node', dict(url='/browser/foreign_server/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
5. Add foreign data wrapper(s) to extension(s)
6. Add foreign server(s) to foreign data wrapper(s)
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "test_fdw_%s" % (str(uuid.uuid4())[1:6])
self.fsrv_name = "test_fsrv_%s" % (str(uuid.uuid4())[1:6])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name,
self.fsrv_name, self.fdw_name)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s) under connected database(s)
schema_utils.add_schemas(cls.tester)
# Add extension(s) to schema(s)
extension_utils.add_extensions(cls.tester)
# Add foreign data wrapper(s) to extension(s)
fdw_utils.add_fdw(cls.tester)
# Add foreign server(s) to foreign data wrapper
fsrv_utils.add_fsrv(cls.tester)
#
def runTest(self):
""" This function will delete foreign server under FDW node. """
"""This function will fetch foreign server present under test
database."""
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name,
self.fsrv_name)
if not fsrv_response:
raise Exception("Could not find FSRV.")
delete_response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(self.fdw_id) + '/' +
str(self.fsrv_id),
follow_redirects=True)
self.assertEquals(delete_response.status_code, 200)
delete_respdata = fsrv_utils.delete_fsrv(self.tester)
self.assertTrue(delete_respdata['success'], 1)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
:return: None
"""
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added extension
and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,105 +6,68 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests\
import utils as fdw_utils
from regression import parent_node_dict
from regression import test_utils as utils
from . import utils as fsrv_utils
class ForeignServerGetTestCase(BaseTestGenerator):
"""
This class will add foreign server under FDW node.
"""
class ForeignServerGetTestCase(BaseTestGenerator):
"""This class will add foreign server under FDW node."""
scenarios = [
# Fetching default URL for foreign server node.
('Check FSRV Node', dict(url='/browser/foreign_server/obj/'))
]
def setUp(self):
    """
    Create the objects this test depends on: the postgres_fdw
    extension plus a foreign data wrapper and foreign server with
    randomized names, inside the last test database/schema recorded
    in parent_node_dict.
    """
    # Last schema added by earlier setup; carries its server/db ids.
    self.schema_data = parent_node_dict['schema'][-1]
    self.server_id = self.schema_data['server_id']
    self.db_id = self.schema_data['db_id']
    self.db_name = parent_node_dict["database"][-1]["db_name"]
    self.schema_name = self.schema_data['schema_name']
    self.extension_name = "postgres_fdw"
    # Random suffixes keep names unique across repeated test runs.
    self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:6])
    self.fsrv_name = "test_fsrv_add_%s" % (str(uuid.uuid4())[1:6])
    # assumes self.server is populated by the test framework before
    # setUp runs - TODO confirm against BaseTestGenerator.
    self.extension_id = extension_utils.create_extension(
        self.server, self.db_name, self.extension_name, self.schema_name)
    self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
                                       self.fdw_name)
    self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name,
                                          self.fsrv_name, self.fdw_name)
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
5. Add foreign data wrapper(s) to extension(s)
6. Add foreign server(s) to foreign data wrapper(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s) under connected database(s)
schema_utils.add_schemas(cls.tester)
# Add extension(s) to schema(s)
extension_utils.add_extensions(cls.tester)
# Add foreign data wrapper(s) to extension(s)
fdw_utils.add_fdw(cls.tester)
# Add foreign server(s) to foreign data wrapper
fsrv_utils.add_fsrv(cls.tester)
#
def runTest(self):
""" This function will fetch foreign server under FDW node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
fsrv_ids_dict = all_id["fsid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
fdw_id = fdw_ids_dict[server_id]
fsrv_id = fsrv_ids_dict[server_id]
response = fsrv_utils.verify_fsrv(self.tester, utils.SERVER_GROUP,
server_id, db_id,
fdw_id, fsrv_id)
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
:return: None
"""
fsrv_utils.delete_fsrv(cls.tester)
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
"""This function will fetch foreign server present under test
database."""
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
fsrv_response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) + '/'
+ str(self.fdw_id) + '/' + str(self.fsrv_id),
content_type='html/json')
self.assertEquals(fsrv_response.status_code, 200)
def tearDown(self):
"""This function disconnect the test database and drop added extension
and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,123 +6,78 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests\
import utils as fdw_utils
from . import utils as fsrv_utils
from regression.test_setup import advanced_config_data
from __future__ import print_function
import uuid
import json
class ForeignServerPutTestCase(BaseTestGenerator):
"""
This class will add foreign server under FDW node.
"""
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests\
import utils as fdw_utils
from regression import parent_node_dict
from regression import test_utils as utils
from . import utils as fsrv_utils
class ForeignServerPutTestCase(BaseTestGenerator):
"""This class will add foreign server under FDW node."""
scenarios = [
# Fetching default URL for foreign server node.
('Check FSRV Node', dict(url='/browser/foreign_server/obj/'))
]
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:6])
self.fsrv_name = "test_fsrv_put_%s" % (str(uuid.uuid4())[1:6])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name,
self.fsrv_name, self.fdw_name)
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
5. Add foreign data wrapper(s) to extension(s)
6. Add foreign server(s) to foreign data wrapper(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s) under connected database(s)
schema_utils.add_schemas(cls.tester)
# Add extension(s) to schema(s)
extension_utils.add_extensions(cls.tester)
# Add foreign data wrapper(s) to extension(s)
fdw_utils.add_fdw(cls.tester)
# Add foreign server(s) to foreign data wrapper
fsrv_utils.add_fsrv(cls.tester)
#
def runTest(self):
""" This function will update foreign server under FDW node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
fsrv_ids_dict = all_id["fsid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
fdw_id = fdw_ids_dict[server_id]
fsrv_id = fsrv_ids_dict[server_id]
response = fsrv_utils.verify_fsrv(self.tester, utils.SERVER_GROUP,
server_id, db_id,
fdw_id, fsrv_id)
if response.status_code == 200:
data = \
{
"description": advanced_config_data['FSRV_update_data']
['comment'],
"id": fsrv_id
"""This function will update foreign server present under test
database."""
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name,
self.fsrv_name)
if not fsrv_response:
raise Exception("Could not find FSRV.")
data = {
"description": "This is foreign server update comment",
"id": self.fsrv_id
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(fdw_id) + '/' +
str(fsrv_id), data=json.dumps(data),
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(self.fdw_id) + '/' +
str(self.fsrv_id), data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added foreign server(s) ,
foreign data wrapper(s), extension(s), schema(s), database(s), server(s)
and parent id file
:return: None
"""
fsrv_utils.delete_fsrv(cls.tester)
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added extension
and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -7,143 +7,74 @@
#
# ##################################################################
from __future__ import print_function
import os
import pickle
import json
from regression.test_setup import advanced_config_data, pickle_path
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
import uuid
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests \
import utils as fdw_utils
import sys
from regression.test_utils import get_db_connection
file_name = os.path.basename(__file__)
FSRV_URL = '/browser/foreign_server/obj/'
def get_fsrv_config_data(server_connect_data):
    """
    Build the POST payload for creating a foreign server.

    The ACL/option/owner values are looked up in the advanced test
    config for the database user of the connected server; the server
    name gets a random suffix so repeated runs do not collide.

    :param server_connect_data: response data of the server-connect call
    :type server_connect_data: dict
    :return: payload for the foreign server create request
    :rtype: dict
    :raises Exception: when the advanced config has no credentials
        entry for the connected database user
    """
    db_user = server_connect_data['data']['user']['name']
    # Get the config data of the appropriate db user.
    adv_config_data = None
    for config_test_data in advanced_config_data['fsrv_credentials']:
        if db_user == config_test_data['owner']:
            adv_config_data = config_test_data
            break
    if adv_config_data is None:
        # Fail with a clear message instead of the opaque TypeError the
        # dict build below would otherwise raise on None.
        raise Exception("No fsrv_credentials config entry found for "
                        "user '%s'." % db_user)
    return {
        "fsrvacl": adv_config_data['fsrv_acl'],
        "fsrvoptions": adv_config_data['fsrv_options'],
        "fsrvowner": adv_config_data['owner'],
        "name": "fsrv_{}".format(str(uuid.uuid4())[1:4])
    }
def add_fsrv(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester,
utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']['connected']:
server_connect_response = server_utils.verify_server(
tester, utils.SERVER_GROUP, server_id)
fdw_id = fdw_ids_dict[server_id]
response = fdw_utils.verify_fdws(tester,
utils.SERVER_GROUP,
server_id, db_id,
fdw_id)
if response.status_code == 200:
data = get_fsrv_config_data(server_connect_response)
response = tester.post(
FSRV_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(
db_id) +
'/' + str(fdw_id) + '/',
data=json.dumps(data),
content_type='html/json')
assert response.status_code == 200
response_data = json.loads(response.data.decode())
write_fsrv_info(response_data, server_id)
def write_fsrv_info(response_data, server_id):
def create_fsrv(server, db_name, fsrv_name, fdw_name):
"""
This function writes the schema id into parent_id.pkl
This function will create foreign data wrapper under the existing
dummy database.
:param response_data: foreign server add response data
:type response_data: dict
:param server_id: server id
:type server_id: str
:return: None
:param server: test_server, test_db, fsrv_name, fdw_name
:return: fsrv_id
"""
fsrv_id = response_data['node']['_id']
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'fsid' in pickle_id_dict:
if pickle_id_dict['fsid']:
# Add the FSRV_id as value in dict
pickle_id_dict["fsid"][0].update({server_id: fsrv_id})
else:
# Create new dict with server_id and fsrv_id
pickle_id_dict["fsid"].append({server_id: fsrv_id})
fsrv_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, fsrv_output)
fsrv_output.close()
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
pg_cursor.execute("CREATE SERVER {0} FOREIGN DATA WRAPPER {1} OPTIONS "
"(host '{2}', dbname '{3}', port '{4}')".format
(fsrv_name, fdw_name, server['host'], db_name,
server['port']))
connection.set_isolation_level(old_isolation_level)
connection.commit()
# Get 'oid' from newly created foreign server
pg_cursor.execute(
"SELECT oid FROM pg_foreign_server WHERE srvname = '%s'"
% fsrv_name)
oid = pg_cursor.fetchone()
fsrv_id = ''
if oid:
fsrv_id = oid[0]
connection.close()
return fsrv_id
except Exception as exception:
exception = "Exception: %s: line:%s %s" % (
file_name, sys.exc_traceback.tb_lineno, exception)
print(exception, file=sys.stderr)
def verify_fsrv(tester, server_group, server_id, db_id, fdw_id, fsrv_id):
    """Issue a GET for the given foreign server node and return the raw
    response object."""
    node_url = "{0}{1}/{2}/{3}/{4}/{5}".format(
        FSRV_URL, server_group, server_id, db_id, fdw_id, fsrv_id)
    return tester.get(node_url, content_type='html/json')
def verify_fsrv(server, db_name , fsrv_name):
""" This function will verify current foreign server."""
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
def delete_fsrv(tester):
    """
    Delete a previously added foreign server through the REST API.

    NOTE(review): the function returns from inside the loop, so only
    the foreign server of the first server id whose node GET succeeds
    is deleted.

    :param tester: test client used to issue HTTP requests
    :return: decoded JSON body of the delete response, or None when no
        foreign server node could be fetched
    """
    all_id = utils.get_ids()
    server_ids = all_id["sid"]
    db_ids_dict = all_id["did"][0]
    fdw_ids_dict = all_id["fid"][0]
    fsrv_ids_dict = all_id["fsid"][0]
    for server_id in server_ids:
        db_id = db_ids_dict[int(server_id)]
        fdw_id = fdw_ids_dict[server_id]
        fsrv_id = fsrv_ids_dict[server_id]
        # Make sure the node still exists before attempting the delete.
        response = verify_fsrv(tester, utils.SERVER_GROUP,
                               server_id, db_id,
                               fdw_id, fsrv_id)
        if response.status_code == 200:
            delete_response = tester.delete(
                FSRV_URL + str(utils.SERVER_GROUP) +
                '/' + str(server_id) + '/' +
                str(db_id) + '/' +
                str(fdw_id) + '/' +
                str(fsrv_id),
                follow_redirects=True)
            delete_respdata = json.loads(delete_response.data.decode())
            return delete_respdata
pg_cursor.execute(
"SELECT oid FROM pg_foreign_server WHERE srvname = '%s'"
% fsrv_name)
fsrvs = pg_cursor.fetchall()
connection.close()
return fsrvs
except Exception as exception:
exception = "%s: line:%s %s" % (
file_name, sys.exc_traceback.tb_lineno, exception)
print(exception, file=sys.stderr)

View File

@ -6,93 +6,92 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import uuid
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests\
import utils as fdw_utils
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\
foreign_servers.tests import utils as fsrv_utils
from . import utils as um_utils
class UserMappingAddTestCase(BaseTestGenerator):
"""
This class will add user mapping under foreign server node.
"""
"""This class will add user mapping under foreign server node."""
scenarios = [
# Fetching default URL for user mapping node.
('Check user mapping Node', dict(url='/browser/user_mapping/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
5. Add foreign data wrapper(s) to extension(s)
6. Add foreign server(s) to foreign data wrapper(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s) under connected database(s)
schema_utils.add_schemas(cls.tester)
# Add extension(s) to schema(s)
extension_utils.add_extensions(cls.tester)
# Add foreign data wrapper(s) to extension(s)
fdw_utils.add_fdw(cls.tester)
# Add foreign server(s) to foreign data wrapper
fsrv_utils.add_fsrv(cls.tester)
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:6])
self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:6])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name,
self.fsrv_name, self.fdw_name)
def runTest(self):
""" This function will add user mapping under foreign server node. """
"""This function will update foreign server present under test
database. """
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name,
self.fsrv_name)
if not fsrv_response:
raise Exception("Could not find FSRV.")
db_user = self.server["username"]
data = {
"name": db_user,
"um_options": [],
"umoptions": [
{
"umoption": "user",
"umvalue": self.server["username"]
},
{
"umoption": "password",
"umvalue": self.server["db_password"]
}
]
}
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(
self.db_id) +
'/' + str(self.fdw_id) + '/' + str(self.fsrv_id) + '/',
data=json.dumps(data),
content_type='html/json')
self.assertEquals(response.status_code, 200)
um_utils.add_um(self.tester)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added user mapping(s), foreign server(s),
foreign data wrapper(s), extension(s), schema(s), database(s),
server(s) and parent id file
:return: None
"""
um_utils.delete_um(cls.tester)
fsrv_utils.delete_fsrv(cls.tester)
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added extension
and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,97 +6,81 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests \
import utils as fdw_utils
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \
foreign_servers.tests import utils as fsrv_utils
from . import utils as um_utils
class UserMappingDeleteTestCase(BaseTestGenerator):
"""
This class will delete user mapping under foreign server node.
"""
"""This class will delete user mapping under foreign server node."""
scenarios = [
# Fetching default URL for user mapping node.
('Check user mapping Node', dict(url='/browser/user_mapping/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
5. Add foreign data wrapper(s) to extension(s)
6. Add foreign server(s) to foreign data wrapper(s)
7. Add user mapping(s) to foreign server(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s) under connected database(s)
schema_utils.add_schemas(cls.tester)
# Add extension(s) to schema(s)
extension_utils.add_extensions(cls.tester)
# Add foreign data wrapper(s) to extension(s)
fdw_utils.add_fdw(cls.tester)
# Add foreign server(s) to foreign data wrapper
fsrv_utils.add_fsrv(cls.tester)
# Add user mapping(s) to foreign server(s)
um_utils.add_um(cls.tester)
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:6])
self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:6])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name,
self.fsrv_name, self.fdw_name)
self.um_id = um_utils.create_user_mapping(self.server, self.db_name,
self.fsrv_name)
def runTest(self):
""" This function delete user mapping under foreign server node. """
"""This function will delete user mapping present under test
database. """
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name,
self.fsrv_name)
if not fsrv_response:
raise Exception("Could not find FSRV.")
um_response = um_utils.verify_user_mapping(self.server, self.db_name,
self.fsrv_name)
if not um_response:
raise Exception("Could not find user mapping.")
delete_response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(self.fdw_id) + '/' +
str(self.fsrv_id) + '/' + str(self.um_id),
follow_redirects=True)
self.assertEquals(delete_response.status_code, 200)
delete_respdata = um_utils.delete_um(self.tester)
self.assertTrue(delete_respdata['success'], 1)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added foreign server(s) ,
foreign data wrapper(s), extension(s), schema(s), database(s),
server(s) and parent id file
:return: None
"""
fsrv_utils.delete_fsrv(cls.tester)
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added extension
and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,115 +6,77 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests \
import utils as fdw_utils
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \
foreign_servers.tests import utils as fsrv_utils
from . import utils as um_utils
class UserMappingGetTestCase(BaseTestGenerator):
"""
This class will add user mapping under foreign server node.
"""
"""This class will add user mapping under foreign server node."""
scenarios = [
# Fetching default URL for user mapping node.
('Check user mapping Node', dict(url='/browser/user_mapping/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
5. Add foreign data wrapper(s) to extension(s)
6. Add foreign server(s) to foreign data wrapper(s)
7. Add user mapping(s) to foreign server(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s) under connected database(s)
schema_utils.add_schemas(cls.tester)
# Add extension(s) to schema(s)
extension_utils.add_extensions(cls.tester)
# Add foreign data wrapper(s) to extension(s)
fdw_utils.add_fdw(cls.tester)
# Add foreign server(s) to foreign data wrapper
fsrv_utils.add_fsrv(cls.tester)
# Add user mapping(s) to foreign server(s)
um_utils.add_um(cls.tester)
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:6])
self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:6])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name,
self.fsrv_name, self.fdw_name)
self.um_id = um_utils.create_user_mapping(self.server, self.db_name,
self.fsrv_name)
def runTest(self):
""" This function will fetch user mapping added to foreign server
node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
fsrv_ids_dict = all_id["fsid"][0]
um_ids_dict = all_id["umid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
fdw_id = fdw_ids_dict[server_id]
fsrv_id = fsrv_ids_dict[server_id]
um_id = um_ids_dict[server_id]
response = um_utils.verify_um(self.tester, utils.SERVER_GROUP,
server_id, db_id,
fdw_id, fsrv_id, um_id)
"""This function will update foreign server present under test
database."""
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name,
self.fsrv_name)
if not fsrv_response:
raise Exception("Could not find FSRV.")
response = self.tester.get(self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(
self.db_id) +
'/' + str(self.fdw_id) + '/' + str(
self.fsrv_id) + '/' + str(
self.um_id), content_type='html/json')
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function delete the added foreign server(s) ,
foreign data wrapper(s), extension(s), schema(s), database(s),
server(s) and parent id file
:return: None
"""
um_utils.delete_um(cls.tester)
fsrv_utils.delete_fsrv(cls.tester)
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added extension
and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,133 +6,95 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import uuid
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests \
import utils as fdw_utils
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers. \
foreign_servers.tests import utils as fsrv_utils
from . import utils as um_utils
from regression.test_setup import advanced_config_data
import json
class UserMappingPutTestCase(BaseTestGenerator):
"""
This class will update user mapping under foreign server node.
"""
"""This class will update user mapping under foreign server node."""
scenarios = [
# Fetching default URL for user mapping node.
('Check user mapping Node', dict(url='/browser/user_mapping/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
5. Add foreign data wrapper(s) to extension(s)
6. Add foreign server(s) to foreign data wrapper(s)
7. Add user mapping(s) to foreign server(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s) under connected database(s)
schema_utils.add_schemas(cls.tester)
# Add extension(s) to schema(s)
extension_utils.add_extensions(cls.tester)
# Add foreign data wrapper(s) to extension(s)
fdw_utils.add_fdw(cls.tester)
# Add foreign server(s) to foreign data wrapper
fsrv_utils.add_fsrv(cls.tester)
# Add user mapping(s) to foreign server(s)
um_utils.add_um(cls.tester)
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "fdw_%s" % (str(uuid.uuid4())[1:6])
self.fsrv_name = "fsrv_%s" % (str(uuid.uuid4())[1:6])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
self.fsrv_id = fsrv_utils.create_fsrv(self.server, self.db_name,
self.fsrv_name, self.fdw_name)
self.um_id = um_utils.create_user_mapping(self.server, self.db_name,
self.fsrv_name)
def runTest(self):
""" This function update user mapping under foreign server node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
fsrv_ids_dict = all_id["fsid"][0]
um_ids_dict = all_id["umid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
fdw_id = fdw_ids_dict[server_id]
fsrv_id = fsrv_ids_dict[server_id]
um_id = um_ids_dict[server_id]
response = um_utils.verify_um(self.tester, utils.SERVER_GROUP,
server_id, db_id,
fdw_id, fsrv_id, um_id)
if response.status_code == 200:
data = \
{
"id": um_id,
"""This function will update foreign server present under test
database"""
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
fsrv_response = fsrv_utils.verify_fsrv(self.server, self.db_name,
self.fsrv_name)
if not fsrv_response:
raise Exception("Could not find FSRV.")
um_response = um_utils.verify_user_mapping(self.server, self.db_name,
self.fsrv_name)
if not um_response:
raise Exception("Could not find user mapping.")
data = {
"id": self.um_id,
"umoptions":
advanced_config_data['user_mapping_update_data']
['options']
{
"changed":
[
{"umoption": "user",
"umvalue": "public"
}
]
}
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(fdw_id) + '/' +
str(fsrv_id) + '/' + str(um_id),
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(self.fdw_id) + '/' +
str(self.fsrv_id) + '/' + str(self.um_id),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added foreign server(s) ,
foreign data wrapper(s), extension(s), schema(s), database(s),
server(s) and parent id file
:return: None
"""
um_utils.delete_um(cls.tester)
fsrv_utils.delete_fsrv(cls.tester)
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added
extension and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,151 +6,78 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import sys
import traceback
import os
import pickle
import json
from regression.test_setup import advanced_config_data, pickle_path
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.tests \
import utils as fdw_utils
from pgadmin.browser.server_groups.servers.databases.foreign_data_wrappers.\
foreign_servers.tests import utils as fsrv_utils
from regression.test_utils import get_db_connection
UM_URL = '/browser/user_mapping/obj/'
def get_um_config_data(server_connect_data):
adv_config_data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in advanced_config_data['user_mapping_credentials']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
data = {
"name": adv_config_data['name'],
"um_options": adv_config_data['option'],
"umoptions": adv_config_data['options']
}
return data
def add_um(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
fsrv_ids_dict = all_id["fsid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester,
utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']['connected']:
server_connect_response = server_utils.verify_server(
tester, utils.SERVER_GROUP, server_id)
fdw_id = fdw_ids_dict[server_id]
fdw_response = fdw_utils.verify_fdws(tester,
utils.SERVER_GROUP,
server_id, db_id,
fdw_id)
fsrv_id = fsrv_ids_dict[server_id]
fsrv_response = fsrv_utils.verify_fsrv(tester, utils.SERVER_GROUP,
server_id, db_id,
fdw_id, fsrv_id)
if fsrv_response.status_code == 200:
data = get_um_config_data(server_connect_response)
response = tester.post(
UM_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(
db_id) +
'/' + str(fdw_id) + '/' + str(fsrv_id) + '/',
data=json.dumps(data),
content_type='html/json')
assert response.status_code == 200
response_data = json.loads(response.data.decode())
write_um_info(response_data, server_id)
def write_um_info(response_data, server_id):
def create_user_mapping(server, db_name, fsrv_name):
"""
This function writes the schema id into parent_id.pkl
:param response_data: foreign server add response data
:type response_data: dict
:param server_id: server id
:type server_id: str
:return: None
This function will create user mapping under the existing
dummy database.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param fsrv_name: FS name
:type fsrv_name: str
:return um_id: user mapping id
:rtype: int
"""
um_id = response_data['node']['_id']
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'umid' in pickle_id_dict:
if pickle_id_dict['umid']:
# Add the umid as value in dict
pickle_id_dict["umid"][0].update({server_id: um_id})
else:
# Create new dict with server_id and umid
pickle_id_dict["umid"].append({server_id: um_id})
fsrv_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, fsrv_output)
fsrv_output.close()
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
query = "CREATE USER MAPPING FOR %s SERVER %s OPTIONS" \
" (user '%s', password '%s')" % (server['username'], fsrv_name, server['username'], server['db_password'])
pg_cursor.execute(query)
connection.set_isolation_level(old_isolation_level)
connection.commit()
# Get 'oid' from newly created user mapping
pg_cursor.execute(
"select umid from pg_user_mappings where srvname = '%s' order by"
" umid asc limit 1" % fsrv_name)
oid = pg_cursor.fetchone()
um_id = ''
if oid:
um_id = oid[0]
connection.close()
return um_id
except Exception:
traceback.print_exc(file=sys.stderr)
def verify_um(tester, server_group, server_id, db_id, fdw_id, fsrv_id, um_id):
response = tester.get(UM_URL + str(server_group) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(fdw_id) + '/' + str(fsrv_id) + '/' + str(
um_id),
content_type='html/json')
return response
def delete_um(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
fsrv_ids_dict = all_id["fsid"][0]
um_ids_dict = all_id["umid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
fdw_id = fdw_ids_dict[server_id]
fsrv_id = fsrv_ids_dict[server_id]
um_id = um_ids_dict[server_id]
response = verify_um(tester, utils.SERVER_GROUP,
server_id, db_id,
fdw_id, fsrv_id, um_id)
if response.status_code == 200:
delete_response = tester.delete(
UM_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(fdw_id) + '/' +
str(fsrv_id) + '/' + str(um_id),
follow_redirects=True)
delete_respdata = json.loads(delete_response.data.decode())
return delete_respdata
def verify_user_mapping(server, db_name, fsrv_name):
"""
This function will verify current foreign server.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param fsrv_name: FS name
:type fsrv_name: str
:return user_mapping: user mapping record
:rtype: tuple
"""
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute(
"select umid from pg_user_mappings where srvname = '%s' order by"
" umid asc limit 1" % fsrv_name)
user_mapping = pg_cursor.fetchone()
connection.close()
return user_mapping
except Exception:
traceback.print_exc(file=sys.stderr)

View File

@ -6,70 +6,63 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from . import utils as fdw_utils
from regression import parent_node_dict
from regression import test_utils as utils
class FDWDAddTestCase(BaseTestGenerator):
""" This class will add foreign data wrappers under database node. """
scenarios = [
# Fetching default URL for foreign_data_wrapper node.
('Check FDW Node',
dict(url='/browser/foreign_data_wrapper/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schema(s) to connected database(s)
4. Add extension(s) to schema(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
extension_utils.add_extensions(cls.tester)
def setUp(self):
""" This function will create extension."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
def runTest(self):
""" This function will add extension under 1st server of tree node. """
fdw_utils.add_fdw(self.tester)
"""This function will add foreign data wrapper under test database."""
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
extension_response = extension_utils.verify_extension(
self.server, self.db_name, self.extension_name)
if not extension_response:
raise Exception("Could not find extension.")
self.data = fdw_utils.get_fdw_data(self.schema_name,
self.server['username'])
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) + '/',
data=json.dumps(self.data),
content_type='html/json')
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
"""
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and
drop added extension."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id,
self.db_id)

View File

@ -6,74 +6,69 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from . import utils as fdw_utils
import json
from regression import parent_node_dict
from regression import test_utils as utils
class FDWDDeleteTestCase(BaseTestGenerator):
""" This class will delete foreign data wrappers under database node. """
"""This class will delete foreign data wrappers under test database."""
scenarios = [
# Fetching default URL for foreign_data_wrapper node.
('Check FDW Node',
dict(url='/browser/foreign_data_wrapper/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the following tasks:
1. Add and connect to the test server(s)
2. Add database(s) connected to server(s)
3. Add schemas to connected database(s)
4. Add extension(s) to schema(s)
5. Add foreign data wrapper(s) to extension(s)
:return: None
"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
extension_utils.add_extensions(cls.tester)
fdw_utils.add_fdw(cls.tester)
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "fdw_{0}".format(str(uuid.uuid4())[1:6])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
def runTest(self):
""" This function will delete added FDW. """
"""This function will fetch foreign data wrapper present under test
database."""
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
extension_response = extension_utils.verify_extension(
self.server, self.db_name, self.extension_name)
if not extension_response:
raise Exception("Could not find extension.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
delete_response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' +
str(self.db_id) + '/' + str(self.fdw_id),
follow_redirects=True)
self.assertEquals(delete_response.status_code, 200)
delete_respdata = fdw_utils.delete_fdw(self.tester)
self.assertTrue(delete_respdata['success'], 1)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
"""
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added extension
and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id,
self.db_id)

View File

@ -7,77 +7,65 @@
#
# ##################################################################
from __future__ import print_function
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from . import utils as fdw_utils
from regression import parent_node_dict
from regression import test_utils as utils
import uuid
class FDWDGetTestCase(BaseTestGenerator):
""" This class will add foreign data wrappers under database node. """
""" This class will add foreign data wrappers under test database. """
scenarios = [
# Fetching default URL for foreign_data_wrapper node.
('Check FDW Node',
dict(url='/browser/foreign_data_wrapper/obj/'))
]
@classmethod
def setUpClass(cls):
"""This function use to add/connect the servers and create databases"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
extension_utils.add_extensions(cls.tester)
fdw_utils.add_fdw(cls.tester)
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "fdw_{0}".format(str(uuid.uuid4())[1:4])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
def runTest(self):
""" This function will get added FDW. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
fdw_id = fdw_ids_dict[server_id]
response = fdw_utils.verify_fdws(self.tester,
"""This function will fetch foreign data wrapper present under test
database."""
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id, db_id,
fdw_id)
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
extension_response = extension_utils.verify_extension(
self.server, self.db_name, self.extension_name)
if not extension_response:
raise Exception("Could not find extension.")
response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' + str(
self.server_id) + '/' +
str(self.db_id) + '/' + str(self.fdw_id),
content_type='html/json')
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added schema, database, server and parent
id file
"""
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added extension
and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id,
self.db_id)

View File

@ -6,96 +6,75 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import uuid
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import\
utils as extension_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.extensions.tests import \
utils as extension_utils
from . import utils as fdw_utils
from regression.test_setup import advanced_config_data
import json
from regression import parent_node_dict
from regression import test_utils as utils
class FDWDPutTestCase(BaseTestGenerator):
""" This class will add foreign data wrappers under database node. """
"""This class will update foreign data wrappers under test database."""
scenarios = [
# Fetching default URL for foreign_data_wrapper node.
('Check FDW Node',
dict(url='/browser/foreign_data_wrapper/obj/'))
]
@classmethod
def setUpClass(cls):
"""This function use to add/connect the servers and create databases"""
# Add the server
server_utils.add_server(cls.tester)
# Connect to servers
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add databases to connected servers
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
schema_utils.add_schemas(cls.tester)
extension_utils.add_extensions(cls.tester)
fdw_utils.add_fdw(cls.tester)
def setUp(self):
""" This function will create extension and foreign data wrapper."""
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = self.schema_data['schema_name']
self.extension_name = "postgres_fdw"
self.fdw_name = "fdw_put_%s".format(str(uuid.uuid4())[1:6])
self.extension_id = extension_utils.create_extension(
self.server, self.db_name, self.extension_name, self.schema_name)
self.fdw_id = fdw_utils.create_fdw(self.server, self.db_name,
self.fdw_name)
def runTest(self):
""" This function will update added FDW. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
fdw_id = fdw_ids_dict[server_id]
response = fdw_utils.verify_fdws(self.tester,
""" This function will fetch foreign data wrapper present under
test database. """
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id, db_id,
fdw_id)
if response.status_code == 200:
data = \
{
"description": advanced_config_data['fdw_update_data']
['comment'],
"id": fdw_id
self.server_id,
self.db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database.")
extension_response = extension_utils.verify_extension(
self.server, self.db_name, self.extension_name)
if not extension_response:
raise Exception("Could not find extension.")
fdw_response = fdw_utils.verify_fdw(self.server, self.db_name,
self.fdw_name)
if not fdw_response:
raise Exception("Could not find FDW.")
data = {
"description": "This is FDW update comment",
"id": self.fdw_id
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' +
str(db_id) + '/' + str(fdw_id),
str(self.server_id) + '/' +
str(self.db_id) + '/' + str(self.fdw_id),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added schema, database, server and parent
id file
"""
fdw_utils.delete_fdw(cls.tester)
extension_utils.delete_extension(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
"""This function disconnect the test database and drop added extension
and dependant objects."""
extension_utils.drop_extension(self.server, self.db_name,
self.extension_name)
database_utils.disconnect_database(self, self.server_id,
self.db_id)

View File

@ -6,128 +6,103 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import os
import pickle
import json
from regression.test_setup import advanced_config_data, pickle_path
from regression import test_utils as utils
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from __future__ import print_function
import traceback
import uuid
import sys
FDW_URL = '/browser/foreign_data_wrapper/obj/'
from regression.test_utils import get_db_connection
def get_fdw_config_data(schema_name, server_connect_data):
adv_config_data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in advanced_config_data['fdw_credentials']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
def get_fdw_data(schema_name, db_user):
data = {
"fdwacl": adv_config_data['acl'],
"fdwhan": "{0}.{1}".format(schema_name, adv_config_data['handler']),
"fdwoptions": adv_config_data['options'],
"fdwowner": adv_config_data['owner'],
"fdwvalue": "{0}.{1}".format(schema_name, adv_config_data['validator']),
"name": "fdw_{}".format(str(uuid.uuid4())[1:4])
"fdwacl":
[
{
"grantee": db_user,
"grantor": db_user,
"privileges":
[
{
"privilege_type": "U",
"privilege": "true",
"with_grant": "true"
}
]
}
],
"fdwhan": "%s.%s" % (schema_name, "postgres_fdw_handler"),
"fdwoptions": [],
"fdwowner": db_user,
"fdwvalue": "%s.%s" % (schema_name, "postgres_fdw_validator"),
"name": "fdw_add_%s" % (str(uuid.uuid4())[1:6])
}
return data
def add_fdw(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_info_dict = all_id["scid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
server_connect_response = server_utils.verify_server(
tester, utils.SERVER_GROUP, server_id)
schema_name = schema_info_dict[int(server_id)][1]
data = get_fdw_config_data(schema_name,
server_connect_response)
response = tester.post(FDW_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) + '/',
data=json.dumps(data),
content_type='html/json')
assert response.status_code == 200
response_data = json.loads(response.data.decode('utf-8'))
write_fdw_info(response_data, server_id)
def write_fdw_info(response_data, server_id):
def create_fdw(server, db_name, fdw_name):
"""
This function writes the sequence id into parent_id.pkl
:param response_data: FDW add response data
:type response_data: dict
:param server_id: server id
:type server_id: str
:return: None
This function will create foreign data wrapper under the existing
dummy database.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param fdw_name: FDW name
:type fdw_name: str
:return fdw_id: fdw id
:rtype: int
"""
fdw_id = response_data['node']['_id']
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'fid' in pickle_id_dict:
if pickle_id_dict['fid']:
# Add the FDW_id as value in dict
pickle_id_dict["fid"][0].update({server_id: fdw_id})
else:
# Create new dict with server_id and FDW_id
pickle_id_dict["fid"].append({server_id: fdw_id})
fdw_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, fdw_output)
fdw_output.close()
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
pg_cursor.execute('''CREATE FOREIGN DATA WRAPPER "%s"''' % fdw_name)
connection.set_isolation_level(old_isolation_level)
connection.commit()
# Get 'oid' from newly created foreign data wrapper
pg_cursor.execute(
"SELECT oid FROM pg_foreign_data_wrapper WHERE fdwname = '%s'"
% fdw_name)
oid = pg_cursor.fetchone()
fdw_id = ''
if oid:
fdw_id = oid[0]
connection.close()
return fdw_id
except Exception:
traceback.print_exc(file=sys.stderr)
def verify_fdws(tester, server_group, server_id, db_id, fdw_id):
response = tester.get(FDW_URL + str(server_group) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(fdw_id),
content_type='html/json')
return response
def delete_fdw(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
fdw_ids_dict = all_id["fid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
fdw_id = fdw_ids_dict[server_id]
response = verify_fdws(tester,
utils.SERVER_GROUP,
server_id, db_id,
fdw_id)
if response.status_code == 200:
delete_response = tester.delete(
FDW_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' +
str(db_id) + '/' + str(fdw_id),
follow_redirects=True)
delete_respdata = json.loads(delete_response.data.decode())
return delete_respdata
def verify_fdw(server, db_name, fdw_name):
"""
This function will verify current foreign data wrapper.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param fdw_name: FDW name
:type fdw_name: str
:return fdw: fdw details
:rtype: tuple
"""
try:
connection = get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute(
"SELECT oid FROM pg_foreign_data_wrapper WHERE fdwname = '%s'"
% fdw_name)
fdw = pg_cursor.fetchone()
connection.close()
return fdw
except Exception:
traceback.print_exc(file=sys.stderr)

View File

@ -6,69 +6,69 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
import json
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as collation_utils
class CollationAddTestCase(BaseTestGenerator):
""" This class will add new collation under schema node. """
scenarios = [
# Fetching default URL for collation node.
('Default Node URL', dict(url='/browser/collation/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
def setUp(self):
self.database_info = parent_node_dict["database"][-1]
self.db_name = self.database_info["db_name"]
# Change the db name, so that schema will create in newly created db
self.schema_name = "schema_get_%s" % str(uuid.uuid4())[1:6]
connection = utils.get_db_connection(self.db_name,
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
self.schema_details = schema_utils.create_schema(connection,
self.schema_name)
def runTest(self):
""" This function will add collation under schema node. """
schema_info = parent_node_dict["schema"][-1]
server_id = schema_info["server_id"]
db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
server_id, db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add collation.")
schema_id = self.schema_details[0]
schema_name = self.schema_details[1]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
schema_name)
if not schema_response:
raise Exception("Could not find the schema to add the collation.")
collation_utils.add_collation(
self.tester, self.server_connect_response, self.server_ids)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added collations, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
collation_utils.delete_collation(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
data = {
"copy_collation": "pg_catalog.\"POSIX\"",
"name": "collation_add_%s" % str(uuid.uuid4())[1:6],
"owner": self.server["username"],
"schema": schema_name
}
response = self.tester.post(self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(
db_id) + '/' + str(schema_id) + '/',
data=json.dumps(data),
content_type='html/json')
self.assertEquals(response.status_code, 200)
# Disconnect the database
database_utils.disconnect_database(self, server_id, db_id)
def tearDown(self):
pass

View File

@ -6,11 +6,11 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
@ -20,58 +20,47 @@ from . import utils as collation_utils
class CollationDeleteTestCase(BaseTestGenerator):
""" This class will delete added collation under schema node. """
scenarios = [
# Fetching default URL for collation node.
('Fetch collation Node URL', dict(url='/browser/collation/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
5. Add the collations
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
# Add collations
collation_utils.add_collation(cls.tester, cls.server_connect_response,
cls.server_ids)
def setUp(self):
self.schema_info = parent_node_dict["schema"][-1]
self.schema_name = self.schema_info["schema_name"]
self.db_name = parent_node_dict["database"][-1]["db_name"]
coll_name = "collation_get_%s" % str(uuid.uuid4())[1:6]
self.collation = collation_utils.create_collation(self.server,
self.schema_name,
coll_name,
self.db_name)
def runTest(self):
""" This function will delete collation under schema node. """
server_id = self.schema_info["server_id"]
db_id = self.schema_info["db_id"]
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id,
db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database.")
collation_utils.delete_collation(self.tester)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema.")
collation_id = self.collation[0]
schema_id = self.schema_info["schema_id"]
get_response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' + str(
server_id) + '/' +
str(db_id) + '/' + str(schema_id) + '/' + str(collation_id),
content_type='html/json')
self.assertEquals(get_response.status_code, 200)
# Disconnect database to delete it
database_utils.disconnect_database(self, server_id, db_id)
def tearDown(self):
pass

View File

@ -6,12 +6,11 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import json
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
@ -21,81 +20,47 @@ from . import utils as collation_utils
class CollationGetTestCase(BaseTestGenerator):
""" This class will fetch new collation under schema node. """
scenarios = [
# Fetching default URL for collation node.
('Fetch collation Node URL', dict(url='/browser/collation/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
5. Add the collations
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
# Add collations
collation_utils.add_collation(cls.tester, cls.server_connect_response,
cls.server_ids)
def setUp(self):
self.schema_info = parent_node_dict["schema"][-1]
self.schema_name = self.schema_info["schema_name"]
self.db_name = parent_node_dict["database"][-1]["db_name"]
coll_name = "collation_get_%s" % str(uuid.uuid4())[1:6]
self.collation = collation_utils.create_collation(self.server,
self.schema_name,
coll_name,
self.db_name)
def runTest(self):
""" This function will fetch collation under schema node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
collation_ids_dict = all_id["coid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
server_id = self.schema_info["server_id"]
db_id = self.schema_info["db_id"]
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_info = schema_ids_dict[int(server_id)]
schema_response = schema_utils.verify_schemas(
self.tester, server_id, db_id, schema_info[0])
schema_response = json.loads(
schema_response.data.decode('utf-8'))
if len(schema_response) != 0:
collation_id = collation_ids_dict[int(server_id)]
get_response = collation_utils.verify_collation(
self.tester, server_id, db_id, schema_info[0],
collation_id)
server_id,
db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database.")
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema.")
collation_id = self.collation[0]
schema_id = self.schema_info["schema_id"]
get_response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' + str(
server_id) + '/' +
str(db_id) + '/' + str(schema_id) + '/' + str(collation_id),
content_type='html/json')
self.assertEquals(get_response.status_code, 200)
# Disconnect database to delete it
database_utils.disconnect_database(self, server_id, db_id)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added collations, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
collation_utils.delete_collation(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
pass

View File

@ -6,13 +6,12 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
import json
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from regression.test_setup import advanced_config_data
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
@ -22,104 +21,61 @@ from . import utils as collation_utils
class CollationPutTestCase(BaseTestGenerator):
""" This class will update added collation under schema node. """
scenarios = [
# Fetching default URL for collation node.
('Fetch collation Node URL', dict(url='/browser/collation/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
5. Add the collations
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
# Add collations
collation_utils.add_collation(cls.tester, cls.server_connect_response,
cls.server_ids)
def setUp(self):
self.schema_info = parent_node_dict["schema"][-1]
self.schema_name = self.schema_info["schema_name"]
self.db_name = parent_node_dict["database"][-1]["db_name"]
coll_name = "collation_get_%s" % str(uuid.uuid4())[1:6]
self.collation = collation_utils.create_collation(self.server,
self.schema_name,
coll_name,
self.db_name)
def runTest(self):
""" This function will update collation under schema node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
collation_ids_dict = all_id["coid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
server_id = self.schema_info["server_id"]
db_id = self.schema_info["db_id"]
# Verify database
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_info = schema_ids_dict[int(server_id)]
schema_response = schema_utils.verify_schemas(self.tester,
server_id,
db_id,
schema_info[0])
schema_response = json.loads(
schema_response.data.decode('utf-8'))
if len(schema_response) != 0:
collation_id = collation_ids_dict[int(server_id)]
get_response = collation_utils.verify_collation(
self.tester, server_id, db_id, schema_info[0],
collation_id)
get_response_data = json.loads(
get_response.data.decode('utf-8'))
if len(get_response_data) == 0:
raise Exception("No collation node to update.")
db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database.")
# Verify schema
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema.")
# Verify collation
coll_name = self.collation[1]
collation_response = collation_utils.verify_collation(self.server,
self.db_name,
coll_name)
if not collation_response:
raise Exception("Could not find the collation.")
collation_id = self.collation[0]
schema_id = self.schema_info["schema_id"]
data = {
"description":
advanced_config_data['collation_update_data']
['comment'],
"id": collation_id,
"description": "This is collation update comment",
"id": collation_id
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' +
str(db_id) + '/' +
str(schema_info[0]) + '/' +
put_response = self.tester.put(self.url + str(utils.SERVER_GROUP) +
'/' + str(server_id) + '/' + str(db_id)
+ '/' + str(schema_id) + '/' +
str(collation_id),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
# Disconnect database to delete it
database_utils.disconnect_database(self, server_id, db_id)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added collations, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
collation_utils.delete_collation(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
pass

View File

@ -6,148 +6,52 @@
# #This software is released under the PostgreSQL Licence
#
# ##########################################################################
from __future__ import print_function
import traceback
import sys
import json
import os
import pickle
from regression.test_setup import pickle_path, advanced_config_data
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from regression import test_utils as utils
COLLATION_URL = '/browser/collation/obj/'
def get_collation_config_data(server_connect_data):
"""This function returns the collation config data"""
adv_config_data = None
data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in \
advanced_config_data['collation_credentials']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
if adv_config_data is not None:
data = {
"copy_collation": adv_config_data['copy_collation'],
"name": adv_config_data['name'],
"owner": adv_config_data['owner'],
"schema": adv_config_data['schema']
}
return data
def write_collation_id(response_data, server_id):
"""
This function writes the server and collation id
:param response_data: collation response data
:type response_data: dict
:param server_id: server id
:type server_id: int
:return: None
"""
collation_id = response_data['node']['_id']
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'coid' in pickle_id_dict:
if pickle_id_dict['coid']:
# Add the db_id as value in dict
pickle_id_dict["coid"][0].update(
{int(server_id): collation_id})
else:
# Create new dict with server_id and db_id
pickle_id_dict["coid"].append(
{int(server_id): collation_id})
db_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, db_output)
db_output.close()
def add_collation(tester, server_connect_response, server_ids):
def create_collation(server, schema_name, coll_name, db_name):
"""This function add the collation to schemas"""
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute('CREATE COLLATION %s.%s FROM pg_catalog."POSIX"' %
(schema_name, coll_name))
connection.commit()
all_id = utils.get_ids()
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
for server_connect_response, server_id in zip(server_connect_response,
server_ids):
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_info = schema_ids_dict[int(server_id)]
schema_utils.verify_schemas(tester, server_id, db_id,
schema_info[0])
data = get_collation_config_data(server_connect_response)
data['schema'] = schema_info[1]
response = tester.post(
COLLATION_URL + str(utils.SERVER_GROUP) + '/' + str(server_id)
+ '/' + str(db_id) + '/' + str(schema_info[0]) + '/',
data=json.dumps(data), content_type='html/json')
response_data = json.loads(response.data.decode('utf-8'))
write_collation_id(response_data, server_id)
# Get 'oid' from newly created database
pg_cursor.execute("SELECT coll.oid, coll.collname FROM"
" pg_collation coll WHERE coll.collname='%s'" %
coll_name)
collation = pg_cursor.fetchone()
connection.close()
return collation
except Exception:
traceback.print_exc(file=sys.stderr)
def verify_collation(tester, server_id, db_id, schema_id, collation_id):
"""This function verifies the collation using GET API"""
get_response = tester.get(
COLLATION_URL + str(utils.SERVER_GROUP) + '/' + str(server_id) + '/' +
str(db_id) + '/' + str(schema_id) + '/' + str(collation_id),
content_type='html/json')
return get_response
def delete_collation(tester):
"""This function deletes the collations from schema"""
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
collation_ids_dict = all_id["coid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_info = schema_ids_dict[int(server_id)]
schema_response = schema_utils.verify_schemas(tester, server_id,
db_id,
schema_info[0])
schema_response = json.loads(schema_response.data.decode('utf-8'))
if len(schema_response) != 0:
collation_id = collation_ids_dict[int(server_id)]
get_response = verify_collation(
tester, server_id, db_id, schema_info[0], collation_id)
get_response_data = json.loads(
get_response.data.decode('utf-8'))
if len(get_response_data) == 0:
raise Exception("No collation node to delete.")
del_response = tester.delete(
COLLATION_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) + '/' +
str(schema_info[0]) + '/' + str(collation_id),
follow_redirects=True)
assert del_response.status_code == 200
del_response_data = json.loads(
del_response.data.decode('utf-8'))
assert del_response_data['success'] == 1
def verify_collation(server, db_name, coll_name):
"""This function verifies the collation is exist or not"""
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
# Get 'oid' from newly created database
pg_cursor.execute("SELECT coll.oid, coll.collname FROM"
" pg_collation coll WHERE coll.collname='%s'" %
coll_name)
collation = pg_cursor.fetchone()
connection.close()
return collation
except Exception:
traceback.print_exc(file=sys.stderr)

View File

@ -0,0 +1,15 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
from pgadmin.utils.route import BaseTestGenerator
class DomainTestGenerator(BaseTestGenerator):
    """Placeholder generator for the domain test package; runs no checks."""

    def runTest(self):
        # Intentionally a no-op: the concrete domain scenarios live in the
        # add/delete/get/put test modules of this package.
        return

View File

@ -0,0 +1,78 @@
# #################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
import json
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
class DomainAddTestCase(BaseTestGenerator):
    """This class will add new domain under schema node."""
    scenarios = [
        # Fetching default URL for domain node.
        ('Fetch domain Node URL', dict(url='/browser/domain/obj/'))
    ]

    def setUp(self):
        pass

    def runTest(self):
        """Add a character(10) domain under the last-created schema.

        Connects to the test database, verifies the target schema exists,
        then POSTs a domain definition and expects HTTP 200.
        """
        db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            # Message fixed: this test adds a domain, not a collation.
            raise Exception("Could not connect to database to add a domain.")
        schema_id = schema_info["schema_id"]
        schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      db_name,
                                                      schema_name)
        if not schema_response:
            # Message fixed: this test adds a domain, not a collation.
            raise Exception("Could not find the schema to add the domain.")
        data = {
            "basensp": schema_name,
            "basetype": "character",
            "collname": "pg_catalog.\"POSIX\"",
            "constraints": [{
                "conname": "num",
                "convalidated": True
            }],
            "is_tlength": True,
            "max_val": 2147483647,
            "min_val": 1,
            # Random suffix keeps repeated runs from colliding on the name.
            "name": "domain_add_%s" % (str(uuid.uuid4())[1:6]),
            "owner": self.server["username"],
            "seclabels": [],
            "typdefault": "1",
            "typlen": "10"
        }
        # Call POST API to add domain
        response = self.tester.post(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(self.db_id) + '/' +
            str(schema_id) + '/',
            data=json.dumps(data),
            content_type='html/json')
        self.assertEquals(response.status_code, 200)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -0,0 +1,69 @@
# #################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as domain_utils
class DomainDeleteTestCase(BaseTestGenerator):
    """ This class will delete new domain under schema node. """
    scenarios = [
        # Fetching default URL for domain node.
        ('Fetch domain Node URL', dict(url='/browser/domain/delete/'))
    ]

    def setUp(self):
        # Record the last-created database/schema and seed a domain via
        # direct SQL; runTest then deletes it through the REST API.
        self.database_info = parent_node_dict["database"][-1]
        self.db_name = self.database_info["db_name"]
        self.schema_info = parent_node_dict["schema"][-1]
        self.schema_name = self.schema_info["schema_name"]
        self.schema_id = self.schema_info["schema_id"]
        # Random suffix avoids name collisions across repeated runs.
        self.domain_name = "domain_delete_%s" % (str(uuid.uuid4())[1:6])
        self.domain_info = domain_utils.create_domain(self.server,
                                                      self.db_name,
                                                      self.schema_name,
                                                      self.schema_id,
                                                      self.domain_name)

    def runTest(self):
        """ This function will delete the domain under schema node. """
        db_id = self.database_info["db_id"]
        server_id = self.database_info["server_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 server_id, db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to get the domain.")
        db_name = self.database_info["db_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to get the domain.")
        # oid of the domain seeded in setUp
        domain_id = self.domain_info[0]
        # Call DELETE API to remove the domain (comment fixed: this is a
        # delete request, not a GET)
        get_response = self.tester.delete(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(server_id) + '/' +
            str(db_id) + '/' +
            str(self.schema_id) + '/' +
            str(domain_id),
            content_type='html/json')
        self.assertEquals(get_response.status_code, 200)
        # Disconnect the database
        database_utils.disconnect_database(self, server_id, db_id)

    def tearDown(self):
        pass

View File

@ -0,0 +1,69 @@
# #################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as domain_utils
class DomainGetTestCase(BaseTestGenerator):
    """ This class will fetch the domain under schema node. """
    scenarios = [
        # Fetching default URL for domain node.
        ('Fetch domain Node URL', dict(url='/browser/domain/obj/'))
    ]

    def setUp(self):
        # Record the last-created database/schema and seed a domain via
        # direct SQL; runTest then fetches it through the REST API.
        self.database_info = parent_node_dict["database"][-1]
        self.db_name = self.database_info["db_name"]
        self.schema_info = parent_node_dict["schema"][-1]
        self.schema_name = self.schema_info["schema_name"]
        self.schema_id = self.schema_info["schema_id"]
        # Random suffix avoids name collisions across repeated runs.
        self.domain_name = "domain_get_%s" % (str(uuid.uuid4())[1:6])
        self.domain_info = domain_utils.create_domain(self.server,
                                                      self.db_name,
                                                      self.schema_name,
                                                      self.schema_id,
                                                      self.domain_name)

    def runTest(self):
        """ This function will fetch the domain under schema node. """
        db_id = self.database_info["db_id"]
        server_id = self.database_info["server_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 server_id, db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to get the domain.")
        db_name = self.database_info["db_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to get the domain.")
        # oid of the domain seeded in setUp
        domain_id = self.domain_info[0]
        # Call GET API to verify the domain
        get_response = self.tester.get(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(server_id) + '/' +
            str(db_id) + '/' +
            str(self.schema_id) + '/' +
            str(domain_id),
            content_type='html/json')
        self.assertEquals(get_response.status_code, 200)
        # Disconnect the database
        database_utils.disconnect_database(self, server_id, db_id)

    def tearDown(self):
        pass

View File

@ -0,0 +1,79 @@
# #################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
import json
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as domain_utils
class DomainPutTestCase(BaseTestGenerator):
    """Update (PUT) a domain's comment under the schema node."""
    scenarios = [
        # Fetching default URL for domain node.
        ('Fetch domain Node URL', dict(url='/browser/domain/obj/'))
    ]

    def setUp(self):
        # Remember the last-created database/schema and seed a domain
        # (via direct SQL) for the PUT request to modify.
        self.database_info = parent_node_dict["database"][-1]
        self.db_name = self.database_info["db_name"]
        self.schema_info = parent_node_dict["schema"][-1]
        self.schema_name = self.schema_info["schema_name"]
        self.schema_id = self.schema_info["schema_id"]
        self.domain_name = "domain_put_%s" % (str(uuid.uuid4())[1:6])
        self.domain_info = domain_utils.create_domain(self.server,
                                                      self.db_name,
                                                      self.schema_name,
                                                      self.schema_id,
                                                      self.domain_name)

    def runTest(self):
        """Update the comment of the seeded domain through the PUT API."""
        server_id = self.database_info["server_id"]
        db_id = self.database_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 server_id, db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to get the domain.")
        db_name = self.database_info["db_name"]
        # Guard clauses: schema and domain must both exist before the PUT.
        if not schema_utils.verify_schemas(self.server, db_name,
                                           self.schema_name):
            raise Exception("Could not find the schema to get the domain.")
        if not domain_utils.verify_domain(self.server, db_name,
                                          self.schema_id, self.domain_name):
            raise Exception("Could not find the domain to update.")
        domain_id = self.domain_info[0]
        payload = {"description": "This is domain update comment",
                   "id": domain_id,
                   }
        node_url = '/'.join([self.url + str(utils.SERVER_GROUP),
                             str(server_id), str(db_id),
                             str(self.schema_id), str(domain_id)])
        response = self.tester.put(node_url,
                                   data=json.dumps(payload),
                                   follow_redirects=True)
        self.assertEquals(response.status_code, 200)
        # Disconnect the database
        database_utils.disconnect_database(self, server_id, db_id)

    def tearDown(self):
        pass

View File

@ -0,0 +1,77 @@
# ##########################################################################
#
# #pgAdmin 4 - PostgreSQL Tools
#
# #Copyright (C) 2013 - 2016, The pgAdmin Development Team
# #This software is released under the PostgreSQL Licence
#
# ##########################################################################
from __future__ import print_function
import traceback
import sys
from regression import test_utils as utils
def create_domain(server, db_name, schema_name, schema_id, domain_name):
    """
    This function is used to add the domain to existing schema
    :param server: server details
    :type server: dict
    :param db_name: database name
    :type db_name: str
    :param schema_name: schema name
    :type schema_name: str
    :param schema_id: schema id
    :type schema_id: int
    :param domain_name: domain name
    :type domain_name: str
    :return: (oid, typname) tuple of the created domain, or None if
     creation failed (the traceback is printed to stderr)
    """
    try:
        connection = utils.get_db_connection(db_name,
                                             server['username'],
                                             server['db_password'],
                                             server['host'],
                                             server['port'])
        pg_cursor = connection.cursor()
        # NOTE(review): identifiers are interpolated directly into the SQL;
        # tolerable only because this is test code that controls both names.
        query = 'CREATE DOMAIN '+schema_name+'.'+domain_name+' AS' \
                ' character(10) COLLATE pg_catalog."POSIX" DEFAULT 1'
        pg_cursor.execute(query)
        connection.commit()
        # Get 'oid' from newly created domain
        pg_cursor.execute("SELECT d.oid, d.typname FROM pg_type d WHERE"
                          " d.typname='%s' AND d.typnamespace='%s'" %
                          (domain_name, schema_id))
        domains = pg_cursor.fetchone()
        connection.close()
        return domains
    except Exception:
        # Swallow and log: callers treat a None return as "creation failed".
        traceback.print_exc(file=sys.stderr)
def verify_domain(server, db_name, schema_id, domain_name):
    """
    Look up the domain by name within the given schema (pg_type namespace).
    :param server: server connection details
    :type server: dict
    :param db_name: database to query
    :type db_name: str
    :param schema_id: oid of the schema (pg_type.typnamespace)
    :type schema_id: int
    :param domain_name: domain name to look for
    :type domain_name: str
    :return: (oid, typname) row for the domain, or None when absent
    """
    conn = utils.get_db_connection(db_name,
                                   server['username'],
                                   server['db_password'],
                                   server['host'],
                                   server['port'])
    lookup_sql = ("SELECT d.oid, d.typname FROM pg_type d WHERE"
                  " d.typname='%s' AND d.typnamespace='%s'" %
                  (domain_name, schema_id))
    cursor = conn.cursor()
    cursor.execute(lookup_sql)
    row = cursor.fetchone()
    conn.close()
    return row

View File

@ -6,69 +6,94 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
import json
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as trigger_funcs_utils
class TriggerFuncAddTestCase(BaseTestGenerator):
""" This class will add new trigger function under schema node. """
scenarios = [
# Fetching default URL for trigger function node.
('Fetch Trigger Function Node URL', dict(
url='/browser/trigger_function/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
def runTest(self):
""" This function will add trigger function under schema node. """
db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
server_id = schema_info["server_id"]
db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
server_id, db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add a function.")
schema_id = schema_info["schema_id"]
schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
db_name,
schema_name)
if not schema_response:
raise Exception("Could not find the schema to add a function.")
db_user = self.server["username"]
data = {
"acl": [
{
"grantee": db_user,
"grantor": db_user,
"privileges":
[
{
"privilege_type": "X",
"privilege": True,
"with_grant": True
}
]
}
],
"arguments": [],
"funcowner": db_user,
"lanname": "plpgsql",
"name": "test_abort_any_command",
"options": [],
"proleakproof": True,
"pronamespace": 2200,
"prorettypename": "event_trigger/trigger",
"prosecdef": True,
"prosrc": "BEGIN RAISE EXCEPTION 'command % is disabled',"
" tg_tag; END;",
"provolatile": "s",
"seclabels": [],
"variables": [
{
"name": "enable_sort",
"value": True
}
]
}
# Get the type from data. We are adding two types
# i.e. event_trigger and trigger.
trigger_func_types = data['prorettypename'].split('/')
for func_type in trigger_func_types:
data['prorettypename'] = func_type
data["name"] = "test_event_add_%s" % str(uuid.uuid4())[1:6]
if schema_id:
data['pronamespace'] = schema_id
else:
schema_id = data['pronamespace']
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) + '/' + str(schema_id)
+ '/', data=json.dumps(data), content_type='html/json')
trigger_funcs_utils.add_trigger_function(
self.tester, self.server_connect_response, self.server_ids)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added trigger function, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
trigger_funcs_utils.delete_trigger_function(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
self.assertEquals(response.status_code, 200)
# Disconnect the database
database_utils.disconnect_database(self, server_id, db_id)

View File

@ -6,8 +6,10 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
@ -19,59 +21,44 @@ from . import utils as trigger_funcs_utils
class TriggerFuncDeleteTestCase(BaseTestGenerator):
""" This class will delete the trigger function under schema node. """
scenarios = [
# Fetching default URL for trigger function node.
('Fetch Trigger Function Node URL',
dict(url='/browser/trigger_function/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
5. Add the trigger functions
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
# Add trigger functions
trigger_funcs_utils.add_trigger_function(
cls.tester, cls.server_connect_response, cls.server_ids)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = parent_node_dict["schema"][-1]["schema_name"]
self.schema_id = parent_node_dict["schema"][-1]["schema_id"]
func_name = "test_event_delete_%s" % str(uuid.uuid4())[1:6]
db_user = self.server["username"]
self.function_info = trigger_funcs_utils.create_trigger_function(
self.server, self.db_name, self.schema_name, func_name)
def runTest(self):
""" This function will delete trigger function under database node. """
schema_info = parent_node_dict["schema"][-1]
server_id = schema_info["server_id"]
db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
server_id, db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add collation.")
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add the collation.")
trigger_func_id = self.function_info[0]
response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/'
+ str(server_id) + '/' + str(db_id) + '/' +
str(self.schema_id) + '/' + str(trigger_func_id),
content_type='html/json')
self.assertEquals(response.status_code, 200)
# Disconnect the database
database_utils.disconnect_database(self, server_id, db_id)
trigger_funcs_utils.delete_trigger_function(self.tester)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added trigger function, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
pass

View File

@ -6,10 +6,11 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
@ -19,80 +20,44 @@ from . import utils as trigger_funcs_utils
class TriggerFuncGetTestCase(BaseTestGenerator):
"""This class will fetch added trigger function under schema node."""
scenarios = [
# Fetching default URL for trigger function node.
('Fetch Trigger Function Node URL',
dict(url='/browser/trigger_function/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
5. Add the trigger functions
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
# Add trigger functions
trigger_funcs_utils.add_trigger_function(
cls.tester, cls.server_connect_response, cls.server_ids)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = parent_node_dict["schema"][-1]["schema_name"]
self.schema_id = parent_node_dict["schema"][-1]["schema_id"]
func_name = "test_event_get_%s" % str(uuid.uuid4())[1:6]
db_user = self.server["username"]
self.function_info = trigger_funcs_utils.create_trigger_function(
self.server, self.db_name, self.schema_name, func_name)
def runTest(self):
""" This function will delete trigger function under database node. """
schema_info = parent_node_dict["schema"][-1]
server_id = schema_info["server_id"]
db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
server_id, db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add collation.")
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add the collation.")
trigger_func_id = self.function_info[0]
response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/'
+ str(server_id) + '/' + str(db_id) + '/' +
str(self.schema_id) + '/' + str(trigger_func_id),
content_type='html/json')
self.assertEquals(response.status_code, 200)
# Disconnect the database
database_utils.disconnect_database(self, server_id, db_id)
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
trigger_ids_dict = all_id["tfnid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(
self.tester, utils.SERVER_GROUP, server_id, db_id)
if db_con['data']["connected"]:
schema_id = schema_ids_dict[int(server_id)][0]
schema_response = schema_utils.verify_schemas(
self.tester, server_id, db_id, schema_id)
if schema_response.status_code == 200:
trigger_func_list = trigger_ids_dict[int(server_id)]
for trigger_func in trigger_func_list:
trigger_func_id = trigger_func[0]
trigger_response = \
trigger_funcs_utils.verify_trigger_function(
self.tester, server_id, db_id, schema_id,
trigger_func_id)
self.assertTrue(trigger_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added trigger function, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
trigger_funcs_utils.delete_trigger_function(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
pass

View File

@ -6,13 +6,12 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
import json
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from regression.test_setup import advanced_config_data
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
@ -22,99 +21,59 @@ from . import utils as trigger_funcs_utils
class TriggerFuncPutTestCase(BaseTestGenerator):
""" This class will update new trigger function under schema node. """
scenarios = [
# Fetching default URL for trigger function node.
('Fetch Trigger Function Node URL',
dict(url='/browser/trigger_function/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
5. Add the trigger functions
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
# Add trigger functions
trigger_funcs_utils.add_trigger_function(
cls.tester, cls.server_connect_response, cls.server_ids)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
self.schema_name = parent_node_dict["schema"][-1]["schema_name"]
self.schema_id = parent_node_dict["schema"][-1]["schema_id"]
func_name = "test_event_put_%s" % str(uuid.uuid4())[1:6]
db_user = self.server["username"]
self.function_info = trigger_funcs_utils.create_trigger_function(
self.server, self.db_name, self.schema_name, func_name)
def runTest(self):
""" This function will update trigger function under database node. """
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
trigger_ids_dict = all_id["tfnid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
utils.SERVER_GROUP,
schema_info = parent_node_dict["schema"][-1]
server_id = schema_info["server_id"]
db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_id = schema_ids_dict[int(server_id)]
schema_response = schema_utils.verify_schemas(self.tester,
server_id,
db_id,
schema_id)
if schema_response.status_code == 200:
trigger_func_list = trigger_ids_dict[int(server_id)]
for trigger_func in trigger_func_list:
trigger_func_id = trigger_func[0]
trigger_response = \
trigger_funcs_utils.verify_trigger_function(
self.tester, server_id, db_id, schema_id,
trigger_func_id)
if trigger_response.status_code == 200:
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add collation.")
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add the collation.")
func_name = self.function_info[1]
func_response = trigger_funcs_utils.verify_trigger_function(
self.server,
self.db_name,
func_name)
if not func_response:
raise Exception("Could not find the trigger function to update"
" it's details.")
trigger_func_id = self.function_info[0]
data = {
"description": advanced_config_data[
'trigger_func_update_data']['comment'],
"description": "This is trigger function update comment",
"id": trigger_func_id
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' +
str(db_id) + '/' +
str(schema_id) + '/' +
put_response = self.tester.put(self.url + str(utils.SERVER_GROUP) +
'/' + str(server_id) + '/' + str(db_id)
+ '/' + str(self.schema_id) + '/' +
str(trigger_func_id),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
# Disconnect the database
database_utils.disconnect_database(self, server_id, db_id)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added trigger function, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
trigger_funcs_utils.delete_trigger_function(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
pass

View File

@ -6,174 +6,50 @@
# #This software is released under the PostgreSQL Licence
#
# ##########################################################################
from __future__ import print_function
import traceback
import sys
import json
import os
import pickle
import uuid
from regression.test_setup import pickle_path, advanced_config_data
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from regression import test_utils as utils
TRIGGER_FUNCTIONS_URL = '/browser/trigger_function/obj/'
TRIGGER_FUNCTIONS_DELETE_URL = '/browser/trigger_function/delete/'
def get_trigger_func_data(server_connect_data):
"""This function returns the trigger function config data"""
adv_config_data = None
data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in \
advanced_config_data['trigger_function_credentials']:
if db_user == config_test_data['fun_owner']:
adv_config_data = config_test_data
if adv_config_data is not None:
data = {
"acl": adv_config_data['acl'],
"arguments": adv_config_data['args'],
"funcowner": adv_config_data['fun_owner'],
"lanname": adv_config_data['language'],
"name": adv_config_data['name'],
"options": adv_config_data['options'],
"proleakproof": adv_config_data['leak_proof'],
"pronamespace": adv_config_data['namespace'],
"prorettypename": adv_config_data['type'],
"prosecdef": adv_config_data['sec_def'],
"prosrc": adv_config_data['code'],
"provolatile": adv_config_data['volitile'],
"seclabels": adv_config_data['sec_label'],
"variables": adv_config_data['Variable']
}
return data
def write_trigger_func_id(trigger_func_ids_list, server_id):
"""
This function writes the server and trigger function related data like
server id and trigger function name
:param trigger_func_ids_list: list of trigger functions ids
:type trigger_func_ids_list: list
:param server_id: server id
:type server_id: int
:return: None
"""
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'tfnid' in pickle_id_dict:
if pickle_id_dict['tfnid']:
# Add the db_id as value in dict
pickle_id_dict["tfnid"][0].update(
{int(server_id): trigger_func_ids_list})
else:
# Create new dict with server_id and db_id
pickle_id_dict["tfnid"].append(
{int(server_id): trigger_func_ids_list})
db_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, db_output)
db_output.close()
def add_trigger_function(tester, server_connect_response, server_ids):
def create_trigger_function(server, db_name, schema_name, func_name):
"""This function add the trigger function to schema"""
all_id = utils.get_ids()
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
for server_connect_response, server_id in zip(server_connect_response,
server_ids):
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_id = schema_ids_dict[int(server_id)][0]
schema_utils.verify_schemas(tester, server_id, db_id,
schema_id)
data = get_trigger_func_data(server_connect_response)
# Get the type from config data. We are adding two types
# i.e. event_trigger and trigger.
trigger_func_types = data['prorettypename'].split('/')
trigger_func_ids_list = []
for func_type in trigger_func_types:
data['prorettypename'] = func_type
data["name"] = "event_{}".format(str(uuid.uuid4())[1:8])
if schema_id:
data['pronamespace'] = schema_id
else:
schema_id = data['pronamespace']
response = tester.post(
TRIGGER_FUNCTIONS_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) + '/' + str(schema_id)
+ '/', data=json.dumps(data), content_type='html/json')
assert response.status_code == 200
response_data = json.loads(response.data.decode('utf-8'))
trigger_func_id = response_data['node']['_id']
event_trigger_name = str(response_data['node']['label'])
trigger_func_ids_list.append(
(trigger_func_id, event_trigger_name, func_type))
write_trigger_func_id(trigger_func_ids_list, server_id)
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
query = "CREATE FUNCTION "+schema_name+"."+func_name+"()" \
" RETURNS event_trigger LANGUAGE 'plpgsql' STABLE LEAKPROOF" \
" SECURITY DEFINER SET enable_sort=true AS $BODY$ BEGIN" \
" NULL; END; $BODY$"
pg_cursor.execute(query)
connection.commit()
# Get 'oid' from newly created function
pg_cursor.execute("SELECT pro.oid, pro.proname FROM"
" pg_proc pro WHERE pro.proname='%s'" %
func_name)
functions = pg_cursor.fetchone()
connection.close()
return functions
except Exception:
traceback.print_exc(file=sys.stderr)
def verify_trigger_function(tester, server_id, db_id, schema_id,
trigger_func_id):
"""This function verifies the trigger function with GET API"""
get_response = tester.get(
TRIGGER_FUNCTIONS_URL + str(utils.SERVER_GROUP) + '/'
+ str(server_id) + '/' + str(db_id) + '/' +
str(schema_id) + '/' + str(trigger_func_id),
content_type='html/json')
assert get_response.status_code == 200
return get_response
def delete_trigger_function(tester):
"""This function add the trigger function to schema"""
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
trigger_ids_dict = all_id["tfnid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_id = schema_ids_dict[int(server_id)][0]
schema_response = schema_utils.verify_schemas(
tester, server_id, db_id, schema_id)
if schema_response.status_code == 200:
trigger_func_list = trigger_ids_dict[int(server_id)]
for trigger_func in trigger_func_list:
trigger_func_id = trigger_func[0]
trigger_response = verify_trigger_function(
tester, server_id, db_id, schema_id, trigger_func_id)
if trigger_response.status_code == 200:
del_response = tester.delete(
TRIGGER_FUNCTIONS_DELETE_URL + str(
utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) + '/' +
str(schema_id) + '/' + str(trigger_func_id),
follow_redirects=True)
assert del_response.status_code == 200
del_response_data = json.loads(
del_response.data.decode('utf-8'))
assert del_response_data['success'] == 1
def verify_trigger_function(server, db_name, func_name):
"""This function verifies the trigger function in db"""
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute("SELECT pro.oid, pro.proname FROM"
" pg_proc pro WHERE pro.proname='%s'" %
func_name)
functions = pg_cursor.fetchone()
connection.close()
return functions

View File

@ -6,67 +6,90 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import json
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as sequence_utils
class SequenceAddTestCase(BaseTestGenerator):
""" This class will add new sequence(s) under schema node. """
scenarios = [
# Fetching default URL for sequence node.
('Fetch sequence Node URL', dict(url='/browser/sequence/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server(s)
2. Connect to server(s)
3. Add the database(s)
4. Add the schema(s)
:return: None
"""
# First, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
def setUp(self):
pass
def runTest(self):
"""This function will add sequence(s) under schema node."""
db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add sequence.")
schema_id = schema_info["schema_id"]
schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
db_name,
schema_name)
if not schema_response:
raise Exception("Could not find the schema to add sequence.")
db_user = self.server["username"]
data = {
"cache": "1",
"cycled": True,
"increment": "1",
"maximum": "100000",
"minimum": "1",
"name": "test_sequence_add_%s" % (str(uuid.uuid4())[1:6]),
"relacl": [
{
"grantee": db_user,
"grantor": db_user,
"privileges":
[
{
"privilege_type": "r",
"privilege": True,
"with_grant": True
},
{
"privilege_type": "w",
"privilege": True,
"with_grant": False
},
{
"privilege_type": "U",
"privilege": True,
"with_grant": False
}
]
}
],
"schema": schema_name,
"securities": [],
"seqowner": db_user,
"start": "100"
}
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(schema_id) + '/',
data=json.dumps(data),
content_type='html/json')
self.assertEquals(response.status_code, 200)
sequence_utils.add_sequences(self.tester)
@classmethod
def tearDownClass(cls):
"""This function deletes the added sequence, schema, database, server
and parent id file
:return: None
"""
sequence_utils.delete_sequence(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,10 +6,11 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
@ -19,57 +20,47 @@ from . import utils as sequence_utils
class SequenceDeleteTestCase(BaseTestGenerator):
"""This class will delete added sequence under schema node."""
scenarios = [
# Fetching default URL for sequence node.
('Fetch sequence Node URL', dict(url='/browser/sequence/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server(s)
2. Connect to server(s)
3. Add database(s)
4. Add schema(s)
5. Add sequence(s)
:return: None
"""
# First, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database(s)
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s)
schema_utils.add_schemas(cls.tester)
# Add sequence(s)
sequence_utils.add_sequences(cls.tester)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add sequence.")
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add sequence.")
self.sequence_name = "test_sequence_delete_%s" % str(uuid.uuid4())[1:6]
self.sequence_id = sequence_utils.create_sequences(
self.server, self.db_name, self.schema_name, self.sequence_name)
def runTest(self):
""" This function will delete added sequence under schema node. """
sequence_response = sequence_utils.verify_sequence(self.server,
self.db_name,
self.sequence_name)
if not sequence_response:
raise Exception("Could not find the sequence to delete.")
response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' +
str(self.db_id) + '/' +
str(self.schema_id) + '/' +
str(self.sequence_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)
sequence_utils.delete_sequence(self.tester)
@classmethod
def tearDownClass(cls):
"""This function deletes the added sequence, schema, database, server
and parent id file
:return: None
"""
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,10 +6,11 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
@ -19,75 +20,42 @@ from . import utils as sequence_utils
class SequenceGetTestCase(BaseTestGenerator):
"""This class will fetch added sequence under schema node."""
scenarios = [
# Fetching default URL for sequence node.
('Fetch sequence Node URL', dict(url='/browser/sequence/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server(s)
2. Connect to server(s)
3. Add database(s)
4. Add schema(s)
5. Add sequence(s)
:return: None
"""
# First, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database(s)
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s)
schema_utils.add_schemas(cls.tester)
# Add sequence(s)
sequence_utils.add_sequences(cls.tester)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add sequence.")
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add sequence.")
self.sequence_name = "test_sequence_delete_%s" % str(uuid.uuid4())[1:6]
self.sequence_id = sequence_utils.create_sequences(
self.server, self.db_name, self.schema_name, self.sequence_name)
def runTest(self):
"""This function will fetch added sequence under schema node."""
response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' +
str(self.db_id) + '/' +
str(self.schema_id) + '/' +
str(self.sequence_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
sequence_ids_dict = all_id["seid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
schema_info = schema_ids_dict[int(server_id)]
schema_id = schema_info[0]
sequence_id = sequence_ids_dict[server_id]
get_response = sequence_utils.verify_sequence(self.tester,
utils.SERVER_GROUP,
server_id,
db_id,
schema_id,
sequence_id)
self.assertEquals(get_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function delete the added sequence, schema, database, server
and parent id file
:return: None
"""
sequence_utils.delete_sequence(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,8 +6,10 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
@ -16,97 +18,56 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as sequence_utils
import json
from regression.test_setup import advanced_config_data
class SequencePutTestCase(BaseTestGenerator):
"""This class will update added sequence under schema node."""
scenarios = [
# Fetching default URL for sequence node.
('Fetch sequence Node URL', dict(url='/browser/sequence/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server(s)
2. Connect to server(s)
3. Add database(s)
4. Add schema(s)
5. Add sequence(s)
:return: None
"""
# First, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database(s)
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schema(s)
schema_utils.add_schemas(cls.tester)
# Add sequence(s)
sequence_utils.add_sequences(cls.tester)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add sequence.")
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add sequence.")
self.sequence_name = "test_sequence_delete_%s" % str(uuid.uuid4())[1:6]
self.sequence_id = sequence_utils.create_sequences(
self.server, self.db_name, self.schema_name, self.sequence_name)
def runTest(self):
"""This function will update added sequence under schema node."""
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
sequence_ids_dict = all_id["seid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
schema_info = schema_ids_dict[int(server_id)]
schema_id = schema_info[0]
sequence_id = sequence_ids_dict[server_id]
get_response = sequence_utils.verify_sequence(self.tester,
utils.SERVER_GROUP,
server_id,
db_id,
schema_id,
sequence_id)
if get_response.status_code == 200:
data = \
{
"comment":
advanced_config_data['sequnce_update_data']
['comment'],
"id": sequence_id
sequence_response = sequence_utils.verify_sequence(self.server,
self.db_name,
self.sequence_name)
if not sequence_response:
raise Exception("Could not find the sequence to delete.")
data = {
"comment": "This is sequence update comment",
"id": self.sequence_id
}
put_response = self.tester.put(
response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' +
str(db_id) + '/' +
str(schema_id) + '/' +
str(sequence_id),
str(self.server_id) + '/' +
str(self.db_id) + '/' +
str(self.schema_id) + '/' +
str(self.sequence_id),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
self.assertEquals(response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""This function deletes the added sequence, schema, database, server
and parent id file
:return: None
"""
sequence_utils.delete_sequence(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,147 +6,74 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from __future__ import print_function
import traceback
import sys
import json
import os
import pickle
from regression.test_setup import pickle_path, advanced_config_data
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from regression import test_utils as utils
import uuid
SEQUENCE_URL = '/browser/sequence/obj/'
def get_sequence_config_data(schema_name, server_connect_data):
adv_config_data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in advanced_config_data['sequence_credentials']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
data = \
{
"cache": adv_config_data['cache'],
"cycled": adv_config_data['cycled'],
"increment": adv_config_data['increment'],
"maximum": adv_config_data['max_value'],
"minimum": adv_config_data['min_value'],
"name": "sequence_{0}".format(str(uuid.uuid4())[1:4]),
"relacl": adv_config_data['acl'],
"schema": schema_name,
"securities": adv_config_data['security'],
"seqowner": adv_config_data['owner'],
"start": adv_config_data['start_val']
}
return data
def add_sequences(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_info_dict = all_id["scid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
server_connect_response = server_utils.verify_server(
tester, utils.SERVER_GROUP, server_id)
schema_name = schema_info_dict[int(server_id)][1]
data = get_sequence_config_data(schema_name,
server_connect_response)
schema_id = schema_info_dict[int(server_id)][0]
response = tester.post(
SEQUENCE_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(schema_id) + '/',
data=json.dumps(data),
content_type='html/json')
assert response.status_code == 200
response_data = json.loads(response.data.decode('utf-8'))
write_sequence_info(response_data, server_id)
def write_sequence_info(response_data, server_id):
def create_sequences(server, db_name, schema_name, sequence_name):
"""
This function writes the sequence id into parent_id.pkl
:param response_data: sequence add response data
:type response_data: dict
:param server_id: server id
:type server_id: str
:return: None
This function used to create sequence in schema provided.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param schema_name: schema name
:type schema_name: str
:param sequence_name: sequence name
:type sequence_name: str
:return sequence_id: sequence id
:rtype: int
"""
sequence_id = response_data['node']['_id']
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'fid' in pickle_id_dict:
if pickle_id_dict['seid']:
# Add the sequence_id as value in dict
pickle_id_dict["seid"][0].update({server_id: sequence_id})
else:
# Create new dict with server_id and sequence_id
pickle_id_dict["seid"].append({server_id: sequence_id})
sequence_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, sequence_output)
sequence_output.close()
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
query = "CREATE SEQUENCE %s.%s START 101" % (schema_name,
sequence_name)
pg_cursor.execute(query)
connection.commit()
# Get 'oid' from newly created sequence
pg_cursor.execute("select oid from pg_class where relname='%s'" %
sequence_name)
sequence = pg_cursor.fetchone()
sequence_id = ''
if sequence:
sequence_id = sequence[0]
connection.close()
return sequence_id
except Exception:
traceback.print_exc(file=sys.stderr)
def verify_sequence(tester, server_group, server_id, db_id, schema_id,
sequence_id):
"""This function verifies the sequence using GET API"""
get_response = tester.get(SEQUENCE_URL + str(server_group) + '/' +
str(server_id) + '/' +
str(db_id) + '/' +
str(schema_id) + '/' +
str(sequence_id),
content_type='html/json')
return get_response
def delete_sequence(tester):
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
sequence_ids_dict = all_id["seid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
schema_info = schema_ids_dict[int(server_id)]
schema_id = schema_info[0]
sequence_id = sequence_ids_dict[server_id]
get_response = verify_sequence(tester,
utils.SERVER_GROUP,
server_id,
db_id,
schema_id,
sequence_id)
if get_response.status_code == 200:
delete_response = tester.delete(
SEQUENCE_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' +
str(db_id) + '/' +
str(schema_id) + '/' +
str(sequence_id),
follow_redirects=True)
assert delete_response.status_code == 200
del_resp_data = json.loads(delete_response.data.decode('utf-8'))
assert del_resp_data['success'] == 1
def verify_sequence(server, db_name, sequence_name):
"""
This function verifies the sequence in database
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param sequence_name: sequence name
:type sequence_name: str
:return sequence: sequence record from database
:rtype: tuple
"""
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute("select * from pg_class where relname='%s'" %
sequence_name)
sequence = pg_cursor.fetchone()
connection.close()
return sequence
except Exception:
traceback.print_exc(file=sys.stderr)

View File

@ -387,6 +387,7 @@ class SynonymView(PGChildNodeView):
SQL = render_template("/".join([self.template_path,
'create.sql']),
data=data, conn=self.conn, comment=False)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)

View File

@ -6,69 +6,72 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
import json
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.schemas.sequences.tests \
import utils as sequence_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as synonym_utils
class SynonymAddTestCase(BaseTestGenerator):
"""This class will add new synonym under schema node."""
scenarios = [
# Fetching default URL for synonym node.
('Default Node URL', dict(url='/browser/synonym/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
server_con = server_utils.connect_server(self, self.server_id)
if server_con:
if "server_type" in server_con["data"]:
if server_con["data"]["server_type"] == "pg":
message = "Synonym not supported by PG."
self.skipTest(message)
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add synonym.")
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add the synonym.")
self.sequence_name = "test_sequence_synonym_%s" % \
str(uuid.uuid4())[1:6]
self.sequence_id = sequence_utils.create_sequences(
self.server, self.db_name, self.schema_name, self.sequence_name)
def runTest(self):
"""This function will add synonym under schema node."""
db_user = self.server["username"]
data = {
"owner": db_user,
"schema": self.schema_name,
"synobjname": self.sequence_name,
"synobjschema": self.schema_name,
"targettype": "Sequence",
"name": "synonym_add_%s" % (str(uuid.uuid4())[1:6])
}
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' + str(self.server_id)
+ '/' + str(self.db_id) + '/' + str(self.schema_id) + '/',
data=json.dumps(data), content_type='html/json')
self.assertEquals(response.status_code, 200)
synonym_utils.add_synonym(
self.tester, self.server_connect_response, self.server_ids)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added synonyms, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
synonym_utils.delete_synonym(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,11 +6,14 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.schemas.sequences.tests \
import utils as sequence_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
@ -20,58 +23,58 @@ from . import utils as synonym_utils
class SynonymDeleteTestCase(BaseTestGenerator):
"""This class will delete added synonym under schema node."""
scenarios = [
# Fetching default URL for synonym node.
('Fetch synonym Node URL', dict(url='/browser/synonym/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
5. Add the synonyms
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
# Add synonyms
synonym_utils.add_synonym(cls.tester, cls.server_connect_response,
cls.server_ids)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
server_con = server_utils.connect_server(self, self.server_id)
if server_con:
if "server_type" in server_con["data"]:
if server_con["data"]["server_type"] == "pg":
message = "Synonym not supported by PG."
self.skipTest(message)
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add synonym.")
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add the synonym.")
self.sequence_name = "test_sequence_synonym_%s" % \
str(uuid.uuid4())[1:6]
self.sequence_id = sequence_utils.create_sequences(
self.server, self.db_name, self.schema_name, self.sequence_name)
self.synonym_name = "test_synonym_delete_%s" % str(uuid.uuid4())[1:6]
self.synonym_id = synonym_utils.create_synonym(self.server,
self.db_name,
self.schema_name,
self.synonym_name,
self.sequence_name)
def runTest(self):
"""This function will delete synonym under schema node."""
synonym_response = synonym_utils.verify_synonym(self.server,
self.db_name,
self.synonym_name)
if not synonym_response:
raise Exception("No synonym node to delete.")
response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) + '/' +
str(self.schema_id) + '/' + str(self.synonym_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)
synonym_utils.delete_synonym(self.tester)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,14 +6,16 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import json
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.sequences.tests\
import utils as sequence_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as synonym_utils
@ -21,81 +23,53 @@ from . import utils as synonym_utils
class SynonymGetTestCase(BaseTestGenerator):
"""This class will fetch new synonym under schema node."""
scenarios = [
# Fetching default URL for synonym node.
('Fetch synonym Node URL', dict(url='/browser/synonym/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
5. Add the synonyms
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
# Add synonyms
synonym_utils.add_synonym(cls.tester, cls.server_connect_response,
cls.server_ids)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
server_con = server_utils.connect_server(self, self.server_id)
if server_con:
if "server_type" in server_con["data"]:
if server_con["data"]["server_type"] == "pg":
message = "Synonym not supported by PG."
self.skipTest(message)
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add synonym.")
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add the synonym.")
self.sequence_name = "test_sequence_synonym_%s" %\
str(uuid.uuid4())[1:6]
self.sequence_id = sequence_utils.create_sequences(
self.server, self.db_name, self.schema_name, self.sequence_name)
self.synonym_name = "test_synonym_get_%s" % str(uuid.uuid4())[1:6]
self.synonym_id = synonym_utils.create_synonym(self.server,
self.db_name,
self.schema_name,
self.synonym_name,
self.sequence_name)
def runTest(self):
"""This function will fetch synonym under schema node."""
response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) + '/' +
str(self.schema_id) + '/' + str(self.synonym_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
synonym_ids_dict = all_id["syid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_info = schema_ids_dict[int(server_id)]
schema_response = schema_utils.verify_schemas(
self.tester, server_id, db_id, schema_info[0])
schema_response = json.loads(
schema_response.data.decode('utf-8'))
if len(schema_response) != 0:
synonym_id = synonym_ids_dict[int(server_id)]
get_response = synonym_utils.verify_synonym(
self.tester, server_id, db_id, schema_info[0],
synonym_id)
self.assertEquals(get_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added synonyms, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
synonym_utils.delete_synonym(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,15 +6,19 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
import json
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from regression.test_setup import advanced_config_data
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.sequences.tests \
import utils as sequence_utils
from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests \
import utils as functions_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from . import utils as synonym_utils
@ -22,104 +26,71 @@ from . import utils as synonym_utils
class SynonymPutTestCase(BaseTestGenerator):
"""This class will update added synonym under schema node."""
scenarios = [
# Fetching default URL for synonym node.
('Fetch synonym Node URL', dict(url='/browser/synonym/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
4. Add the schemas
5. Add the synonyms
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
# Add synonyms
synonym_utils.add_synonym(cls.tester, cls.server_connect_response,
cls.server_ids)
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
server_con = server_utils.connect_server(self, self.server_id)
if server_con:
if "server_type" in server_con["data"]:
if server_con["data"]["server_type"] == "pg":
message = "Synonym not supported by PG."
self.skipTest(message)
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add synonym.")
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add the synonym.")
self.sequence_name = "test_sequence_synonym_%s" % \
str(uuid.uuid4())[1:6]
self.sequence_id = sequence_utils.create_sequences(
self.server, self.db_name, self.schema_name, self.sequence_name)
self.synonym_name = "test_synonym_put_%s" % str(uuid.uuid4())[1:6]
self.synonym_id = synonym_utils.create_synonym(self.server,
self.db_name,
self.schema_name,
self.synonym_name,
self.sequence_name)
def runTest(self):
"""This function will update synonym under schema node."""
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
synonym_ids_dict = all_id["syid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_info = schema_ids_dict[int(server_id)]
schema_response = schema_utils.verify_schemas(self.tester,
server_id,
db_id,
schema_info[0])
schema_response = json.loads(
schema_response.data.decode('utf-8'))
if len(schema_response) != 0:
synonym_id = synonym_ids_dict[int(server_id)]
get_response = synonym_utils.verify_synonym(
self.tester, server_id, db_id, schema_info[0],
synonym_id)
get_response_data = json.loads(
get_response.data.decode('utf-8'))
if len(get_response_data) == 0:
synonym_response = synonym_utils.verify_synonym(self.server,
self.db_name,
self.synonym_name)
if not synonym_response:
raise Exception("No synonym node to update.")
func_name = "test_function_synonym_%s" % str(uuid.uuid4())[1:6]
self.table_id = functions_utils.create_trigger_function(
self.server, self.db_name, self.schema_name, func_name)
data = {
"description":
advanced_config_data['synonym_update_data']
['comment'],
"id": synonym_id,
"name": self.synonym_name,
"synobjname": func_name,
"synobjschema": self.schema_name,
"targettype": "Function"
}
put_response = self.tester.put(
response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' +
str(db_id) + '/' +
str(schema_info[0]) + '/' +
str(synonym_id),
str(self.server_id) + '/' +
str(self.db_id) + '/' +
str(self.schema_id) + '/' +
str(self.synonym_name),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(response.status_code, 200)
self.assertEquals(put_response.status_code, 200)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added synonyms, schemas, database,
server and the 'parent_id.pkl' file which is created in setup()
function.
:return: None
"""
synonym_utils.delete_synonym(cls.tester)
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -6,149 +6,73 @@
# #This software is released under the PostgreSQL Licence
#
# ##########################################################################
from __future__ import print_function
import traceback
import sys
import json
import os
import pickle
from regression.test_setup import pickle_path, advanced_config_data
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as database_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from regression import test_utils as utils
SYNONYM_URL = '/browser/synonym/obj/'
def get_synonym_config_data(server_connect_data):
"""This function returns the synonym config data"""
adv_config_data = None
data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in \
advanced_config_data['synonym_credentials']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
if adv_config_data is not None:
data = {
"name": adv_config_data['name'],
"schema": adv_config_data['schema'],
"synobjname": adv_config_data['synobjname'],
"synobjschema": adv_config_data['synobjschema'],
"targettype": adv_config_data['targettype']
}
return data
def write_synonym_id(response_data, server_id):
def create_synonym(server, db_name, schema_name, synonym_name, sequence_name):
"""
This function writes the server and synonym id
:param response_data: synonym response data
:type response_data: dict
:param server_id: server id
:type server_id: int
:return: None
This function create the synonym on given schema node.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param schema_name: schema name
:type schema_name: str
:param synonym_name: synonym name
:type synonym_name: str
:param sequence_name: sequence name
:type sequence_name: str
:return synonym_id: synonym_id
:rtype: int
"""
synonym_id = response_data['node']['_id']
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'syid' in pickle_id_dict:
if pickle_id_dict['syid']:
# Add the db_id as value in dict
pickle_id_dict["syid"][0].update(
{int(server_id): synonym_id})
else:
# Create new dict with server_id and db_id
pickle_id_dict["syid"].append(
{int(server_id): synonym_id})
db_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, db_output)
db_output.close()
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
query = "CREATE OR REPLACE SYNONYM %s.%s FOR %s.%s" % (
schema_name, synonym_name, schema_name, sequence_name)
pg_cursor.execute(query)
connection.commit()
# Get 'oid' from newly created synonym
pg_cursor.execute("SELECT oid FROM pg_synonym WHERE synname='%s'" %
synonym_name)
synonym_id = pg_cursor.fetchone()
connection.close()
return synonym_id
except Exception:
traceback.print_exc(file=sys.stderr)
def add_synonym(tester, server_connect_response, server_ids):
"""This function add the synonym to schemas"""
all_id = utils.get_ids()
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
for server_connect_response, server_id in zip(server_connect_response,
server_ids):
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_info = schema_ids_dict[int(server_id)]
schema_utils.verify_schemas(tester, server_id, db_id,
schema_info[0])
data = get_synonym_config_data(server_connect_response)
data['schema'] = schema_info[1]
response = tester.post(
SYNONYM_URL + str(utils.SERVER_GROUP) + '/' + str(server_id)
+ '/' + str(db_id) + '/' + str(schema_info[0]) + '/',
data=json.dumps(data), content_type='html/json')
response_data = json.loads(response.data.decode('utf-8'))
write_synonym_id(response_data, server_id)
def verify_synonym(tester, server_id, db_id, schema_id, synonym_id):
"""This function verifies the synonym using GET API"""
get_response = tester.get(
SYNONYM_URL + str(utils.SERVER_GROUP) + '/' + str(server_id) + '/' +
str(db_id) + '/' + str(schema_id) + '/' + str(synonym_id),
content_type='html/json')
return get_response
def delete_synonym(tester):
"""This function deletes the synonyms from schema"""
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
synonym_ids_dict = all_id["syid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_info = schema_ids_dict[int(server_id)]
schema_response = schema_utils.verify_schemas(tester, server_id,
db_id,
schema_info[0])
schema_response = json.loads(schema_response.data.decode('utf-8'))
if len(schema_response) != 0:
synonym_id = synonym_ids_dict[int(server_id)]
get_response = verify_synonym(
tester, server_id, db_id, schema_info[0], synonym_id)
get_response_data = json.loads(
get_response.data.decode('utf-8'))
if len(get_response_data) == 0:
raise Exception("No synonym node to delete.")
del_response = tester.delete(
SYNONYM_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) + '/' +
str(schema_info[0]) + '/' + str(synonym_id),
follow_redirects=True)
assert del_response.status_code == 200
del_response_data = json.loads(
del_response.data.decode('utf-8'))
assert del_response_data['success'] == 1
def verify_synonym(server, db_name, synonym_name):
"""
This function create the synonym on given schema node.
:param server: server details
:type server: dict
:param db_name: database name
:type db_name: str
:param synonym_name: synonym name
:type synonym_name: str
:return synonym: synonym record from database
:rtype: tuple
"""
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute("SELECT * FROM pg_synonym WHERE synname='%s'" %
synonym_name)
synonym = pg_cursor.fetchone()
connection.close()
return synonym
except Exception:
traceback.print_exc(file=sys.stderr)

View File

@ -6,60 +6,66 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import json
import uuid
from regression import test_utils as utils
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from . import utils as schema_utils
class SchemaAddTestCase(BaseTestGenerator):
""" This class will add new schema under database node. """
scenarios = [
# Fetching default URL for schema node.
('Check Schema Node URL', dict(url='/browser/schema/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
def runTest(self):
""" This function will add schema under database node. """
database_info = parent_node_dict["database"][-1]
server_id = database_info["server_id"]
schema_utils.add_schemas(self.tester)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added schemas, database, server
and the 'parent_id.pkl' file which is created in setup() function.
:return: None
"""
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
db_id = database_info["db_id"]
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id,
db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database to add the schema.")
db_user = self.server["username"]
data = {
"deffuncacl": [],
"defseqacl": [],
"deftblacl": [],
"deftypeacl": [],
"name": "test_schema_{0}".format(str(uuid.uuid4())[1:6]),
"namespaceowner": db_user,
"nspacl": [
{
"grantee": db_user,
"grantor": db_user,
"privileges":
[
{
"privilege_type": "C",
"privilege": True,
"with_grant": False
},
{
"privilege_type": "U",
"privilege": True,
"with_grant": False
}
]
}
],
"seclabels": []
}
response = self.tester.post(self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/', data=json.dumps(data),
content_type='html/json')
self.assertEquals(response.status_code, 200)

View File

@ -6,10 +6,11 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from . import utils as schema_utils
@ -23,44 +24,42 @@ class SchemaDeleteTestCase(BaseTestGenerator):
('Check Schema Node URL', dict(url='/browser/schema/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
def setUp(self):
self.database_info = parent_node_dict["database"][-1]
self.db_name = self.database_info["db_name"]
# Change the db name, so that schema will create in newly created db
self.schema_name = "schema_get_%s" % str(uuid.uuid4())[1:6]
connection = utils.get_db_connection(self.db_name,
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
self.schema_details = schema_utils.create_schema(connection,
self.schema_name)
def runTest(self):
""" This function will delete schema under database node. """
server_id = self.database_info["server_id"]
db_id = self.database_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
server_id, db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to delete the"
" schema.")
schema_utils.delete_schema(self.tester)
schema_id = self.schema_details[0]
schema_name = self.schema_details[1]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
schema_name)
if not schema_response:
raise Exception("Could not find the schema to delete.")
@classmethod
def tearDownClass(cls):
"""
This function deletes the added schemas, database, server
and the 'parent_id.pkl' file which is created in setup() function.
response = self.tester.delete(self.url + str(utils.SERVER_GROUP)
+ '/' + str(server_id) + '/' +
str(db_id) + '/' + str(schema_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)
:return: None
"""
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
pass

View File

@ -6,77 +6,45 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from . import utils as schema_utils
class SchemaGetTestCase(BaseTestGenerator):
""" This class will add new schema under database node. """
scenarios = [
# Fetching default URL for extension node.
('Check Schema Node URL', dict(url='/browser/schema/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
def runTest(self):
""" This function will delete schema under database node. """
schema = parent_node_dict["schema"][-1]
db_id = schema["db_id"]
server_id = schema["server_id"]
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
server_response = server_utils.connect_server(self, server_id)
if not server_response["data"]["connected"]:
raise Exception("Could not connect to server to connect the"
" database.")
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_id = schema_ids_dict[int(server_id)][0]
schema_response = schema_utils.verify_schemas(self.tester,
server_id, db_id,
schema_id)
self.assertTrue(schema_response.status_code, 200)
server_id,
db_id)
if not db_con["info"] == "Database connected.":
raise Exception("Could not connect to database to get the schema.")
@classmethod
def tearDownClass(cls):
"""
This function deletes the added schemas, database, server
and the 'parent_id.pkl' file which is created in setup() function.
:return: None
"""
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
schema_id = schema["schema_id"]
schema_response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(schema_id),
content_type='html/json')
self.assertEquals(schema_response.status_code, 200)
# Disconnect the database
database_utils.disconnect_database(self, server_id, db_id)

View File

@ -6,86 +6,123 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import json
import uuid
from regression import test_utils as utils
from regression import parent_node_dict
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from regression.test_setup import advanced_config_data
from . import utils as schema_utils
class SchemaPutTestCase(BaseTestGenerator):
""" This class will update the schema under database node. """
scenarios = [
# Fetching default URL for extension node.
('Check Schema Node URL', dict(url='/browser/schema/obj/'))
]
@classmethod
def setUpClass(cls):
"""
This function perform the three tasks
1. Add the test server
2. Connect to server
3. Add the databases
:return: None
"""
# Firstly, add the server
server_utils.add_server(cls.tester)
# Connect to server
cls.server_connect_response, cls.server_group, cls.server_ids = \
server_utils.connect_server(cls.tester)
if len(cls.server_connect_response) == 0:
raise Exception("No Server(s) connected to add the database!!!")
# Add database
database_utils.add_database(cls.tester, cls.server_connect_response,
cls.server_ids)
# Add schemas
schema_utils.add_schemas(cls.tester)
def setUp(self):
self.database_info = parent_node_dict["database"][-1]
self.db_name = self.database_info["db_name"]
# Change the db name, so that schema will create in newly created db
self.schema_name = "schema_get_%s" % str(uuid.uuid4())[1:6]
connection = utils.get_db_connection(self.db_name,
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'])
self.schema_details = schema_utils.create_schema(connection,
self.schema_name)
def runTest(self):
""" This function will delete schema under database node. """
all_id = utils.get_ids()
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
for server_connect_data, server_id in zip(self.server_connect_response,
self.server_ids):
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(self.tester,
utils.SERVER_GROUP,
server_id = self.database_info["server_id"]
db_id = self.database_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_id = schema_ids_dict[int(server_id)][0]
schema_response = schema_utils.verify_schemas(self.tester,
server_id, db_id,
schema_id)
schema_response = json.loads(
schema_response.data.decode('utf-8'))
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to delete the"
" schema.")
schema_id = self.schema_details[0]
schema_name = self.schema_details[1]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
schema_name)
if not schema_response:
raise Exception("No schema(s) to update.")
raise Exception("Could not find the schema to update.")
adv_config_data = None
data = None
db_user = server_connect_data['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in advanced_config_data['schema_update_data']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
if adv_config_data is not None:
db_user = self.server["username"]
data = {
"deffuncacl": adv_config_data["func_acl"],
"defseqacl": adv_config_data["seq_acl"],
"deftblacl": adv_config_data["tbl_acl"],
"deffuncacl": {
"added":
[
{
"grantee": db_user,
"grantor": db_user,
"privileges":
[
{
"privilege_type": "X",
"privilege": True,
"with_grant": True
}
]
}
]
},
"defseqacl": {
"added":
[
{
"grantee": db_user,
"grantor": db_user,
"privileges":
[
{
"privilege_type": "r",
"privilege": True,
"with_grant": False
},
{
"privilege_type": "w",
"privilege": True,
"with_grant": False
},
{
"privilege_type": "U",
"privilege": True,
"with_grant": False
}
]
}
]
},
"deftblacl": {
"added":
[
{
"grantee": "public",
"grantor": db_user,
"privileges":
[
{
"privilege_type": "D",
"privilege": True,
"with_grant": False
},
{
"privilege_type": "x",
"privilege": True,
"with_grant": False
}
]
}
]
},
"id": schema_id
}
put_response = self.tester.put(
@ -94,19 +131,8 @@ class SchemaPutTestCase(BaseTestGenerator):
data=json.dumps(data), follow_redirects=True)
self.assertEquals(put_response.status_code, 200)
response_data = json.loads(put_response.data.decode('utf-8'))
self.assertTrue(response_data['success'], 1)
# Disconnect the database
database_utils.disconnect_database(self, server_id, db_id)
@classmethod
def tearDownClass(cls):
"""
This function deletes the added schemas, database, server
and the 'parent_id.pkl' file which is created in setup() function.
:return: None
"""
schema_utils.delete_schema(cls.tester)
database_utils.delete_database(cls.tester)
server_utils.delete_server(cls.tester)
utils.delete_parent_id_file()
def tearDown(self):
pass

View File

@ -6,144 +6,79 @@
# This software is released under the PostgreSQL Licence
#
# ##################################################################
import json
import os
import pickle
from __future__ import print_function
import sys
import uuid
import traceback
from regression.test_setup import pickle_path, advanced_config_data
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from regression import test_utils as utils
SCHEMA_URL = '/browser/schema/obj/'
SCHEMA_DELETE_URL = '/browser/schema/delete/'
def get_schema_config_data(server_connect_response):
def get_schema_config_data(db_user):
"""This function is used to get advance config test data for schema"""
adv_config_data = None
data = None
db_user = server_connect_response['data']['user']['name']
# Get the config data of appropriate db user
for config_test_data in advanced_config_data['schema_credentials']:
if db_user == config_test_data['owner']:
adv_config_data = config_test_data
if adv_config_data is not None:
data = {
"deffuncacl": adv_config_data['func_acl'],
"defseqacl": adv_config_data['seq_acl'],
"deftblacl": adv_config_data['tbl_acl'],
"deftypeacl": adv_config_data['type_acl'],
"name": "schema_{0}".format(str(uuid.uuid4())[1:8]),
"namespaceowner": adv_config_data['owner'],
"nspacl": adv_config_data['privilege'],
"seclabels": adv_config_data['sec_label']
"deffuncacl": [],
"defseqacl": [],
"deftblacl": [],
"deftypeacl": [],
"name": "test_schema_{0}".format(str(uuid.uuid4())[1:8]),
"namespaceowner": db_user,
"nspacl": [
{
"grantee": db_user,
"grantor": db_user,
"privileges":
[
{
"privilege_type": "C",
"privilege": True,
"with_grant": False
},
{
"privilege_type": "U",
"privilege": True,
"with_grant": False
}
]
}
],
"seclabels": []
}
return data
def write_schema_id(response_data, server_id):
"""
This function writes the schema id into parent_id.pkl
:param response_data: schema add response data
:type response_data: dict
:param server_id: server id
:type server_id: str
:return: None
"""
schema_id = response_data['node']['_id']
schema_name = str(response_data['node']['label'])
pickle_id_dict = utils.get_pickle_id_dict()
if os.path.isfile(pickle_path):
existing_server_id = open(pickle_path, 'rb')
tol_server_id = pickle.load(existing_server_id)
pickle_id_dict = tol_server_id
if 'scid' in pickle_id_dict:
if pickle_id_dict['scid']:
# Add the schema_id as value in dict
pickle_id_dict["scid"][0].update(
{int(server_id): [schema_id, schema_name]})
else:
# Create new dict with server_id and schema_id
pickle_id_dict["scid"].append(
{int(server_id): [schema_id, schema_name]})
schema_output = open(pickle_path, 'wb')
pickle.dump(pickle_id_dict, schema_output)
schema_output.close()
def add_schemas(tester):
def create_schema(connection, schema_name):
"""This function add the schemas into databases"""
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
for server_id in server_ids:
server_connect_response = server_utils.verify_server(
tester, str(utils.SERVER_GROUP), server_id)
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
data = get_schema_config_data(
server_connect_response)
response = tester.post(
SCHEMA_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/', data=json.dumps(data),
content_type='html/json')
assert response.status_code == 200
response_data = json.loads(response.data.decode('utf-8'))
write_schema_id(response_data, server_id)
try:
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
pg_cursor.execute("CREATE SCHEMA %s" % schema_name)
connection.set_isolation_level(old_isolation_level)
connection.commit()
# Get schema details of newly created schema
pg_cursor.execute("SELECT sch.oid, sch.nspname FROM pg_namespace sch"
" WHERE sch.nspname='%s'" % schema_name)
schema = pg_cursor.fetchone()
connection.close()
return schema
except Exception:
traceback.print_exc(file=sys.stderr)
def verify_schemas(tester, server_id, db_id, schema_id):
def verify_schemas(server, db_name, schema_name):
"""This function verifies the schema is exists"""
response = tester.get(SCHEMA_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id) +
'/' + str(schema_id),
content_type='html/json')
return response
def delete_schema(tester):
"""This function delete schemas from the databases"""
all_id = utils.get_ids()
server_ids = all_id["sid"]
db_ids_dict = all_id["did"][0]
schema_ids_dict = all_id["scid"][0]
for server_id in server_ids:
db_id = db_ids_dict[int(server_id)]
db_con = database_utils.verify_database(tester, utils.SERVER_GROUP,
server_id, db_id)
if db_con['data']["connected"]:
schema_id = schema_ids_dict[int(server_id)][0]
schema_response = verify_schemas(tester, server_id, db_id,
schema_id)
schema_response = json.loads(schema_response.data.decode('utf-8'))
if not schema_response:
raise Exception("No schema(s) to delete.")
del_response = tester.delete(
SCHEMA_DELETE_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' +
str(db_id) + '/' +
str(schema_id),
follow_redirects=True)
assert del_response.status_code == 200
response_data = json.loads(del_response.data.decode('utf-8'))
assert response_data['success'] == 1
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'])
pg_cursor = connection.cursor()
pg_cursor.execute("SELECT * FROM pg_namespace sch"
" WHERE sch.nspname='%s'" % schema_name)
schema = pg_cursor.fetchone()
connection.close()
return schema
except Exception:
traceback.print_exc(file=sys.stderr)

View File

@ -11,14 +11,13 @@ import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from . import utils as database_utils
class DatabaseAddTestCase(BaseTestGenerator):
"""This class will test the ADD database API"""
scenarios = [
# Fetching default URL for database node.
('Check Databases Node URL', dict(url='/browser/database/obj/'))
@ -30,7 +29,7 @@ class DatabaseAddTestCase(BaseTestGenerator):
def runTest(self):
""" This function will add database under 1st server of tree node. """
self.db_name = ''
self.server_id = test_server_dict["server"][0]["server_id"]
self.server_id = parent_node_dict["server"][-1]["server_id"]
server_response = server_utils.connect_server(self, self.server_id)
if server_response["info"] == "Server connected.":
db_owner = server_response['data']['user']['name']
@ -43,8 +42,9 @@ class DatabaseAddTestCase(BaseTestGenerator):
self.assertEquals(response.status_code, 200)
response_data = json.loads(response.data.decode('utf-8'))
db_id = response_data['node']['_id']
db_dict = {"db_id": db_id, "db_name": self.db_name}
utils.write_node_info(int(self.server_id), "did", db_dict)
db_dict = {"server_id": self.server_id, "db_id": db_id,
"db_name": self.db_name}
utils.write_node_info("did", db_dict)
else:
raise Exception("Error while connecting server to add the"
" database.")

View File

@ -10,7 +10,7 @@ import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
@ -24,16 +24,19 @@ class DatabaseDeleteTestCase(BaseTestGenerator):
def setUp(self):
self.db_name = "db_delete_%s" % str(uuid.uuid4())[1:4],
self.db_id = utils.create_database(self.server, self.db_name)
self.server_id = parent_node_dict["server"][-1]["server_id"]
db_dict = {"server_id": self.server_id, "db_id": self.db_id,
"db_name": self.db_name}
utils.write_node_info("did", db_dict)
def runTest(self):
""" This function will delete the database."""
server_id = test_server_dict["server"][0]["server_id"]
server_response = server_utils.connect_server(self, server_id)
server_response = server_utils.connect_server(self, self.server_id)
if server_response["data"]["connected"]:
db_id = self.db_id
response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id),
str(self.server_id) + '/' + str(db_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)
else:

View File

@ -9,7 +9,7 @@
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from . import utils as database_utils
@ -24,10 +24,10 @@ class DatabasesGetTestCase(BaseTestGenerator):
def runTest(self):
""" This function will fetch added database. """
server_data = test_server_dict["database"][0]
server_data = parent_node_dict["database"][-1]
self.server_id = server_data["server_id"]
self.db_id = server_data['db_id']
db_con = database_utils.verify_database(self,
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
self.server_id,
self.db_id)

View File

@ -12,7 +12,7 @@ import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from . import utils as database_utils
@ -26,32 +26,36 @@ class DatabasesUpdateTestCase(BaseTestGenerator):
def setUp(self):
self.db_name = "test_db_put_%s" % str(uuid.uuid4())[1:8],
self.db_id = utils.create_database(self.server, self.db_name)
self.server_id = parent_node_dict["server"][-1]["server_id"]
db_dict = {"server_id": self.server_id, "db_id": self.db_id,
"db_name": self.db_name}
utils.write_node_info("did", db_dict)
def runTest(self):
""" This function will update the comments field of database."""
server_id = test_server_dict["server"][0]["server_id"]
db_id = self.db_id
db_con = database_utils.verify_database(self,
db_con = database_utils.connect_database(self,
utils.SERVER_GROUP,
server_id,
db_id)
self.server_id,
self.db_id)
if db_con["info"] == "Database connected.":
try:
data = {
"comments": "This is db update comment",
"id": db_id
"id": self.db_id
}
response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' + str(
server_id) + '/' +
str(db_id), data=json.dumps(data), follow_redirects=True)
self.server_id) + '/' +
str(self.db_id), data=json.dumps(data),
follow_redirects=True)
self.assertEquals(response.status_code, 200)
except Exception as exception:
raise Exception("Error while updating database details. %s" %
exception)
finally:
# Disconnect database to delete it
database_utils.disconnect_database(self, server_id, db_id)
database_utils.disconnect_database(self, self.server_id,
self.db_id)
else:
raise Exception("Error while updating database details.")

View File

@ -6,15 +6,12 @@
# #This software is released under the PostgreSQL Licence
#
# ##########################################################################
import json
import uuid
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from regression import test_utils as utils
DATABASE_URL = '/browser/database/obj/'
DATABASE_CONNECT_URL = '/browser/database/connect/'
@ -93,7 +90,7 @@ def get_db_data(db_owner):
}
],
"encoding": "UTF8",
"name": "db_add_%s" % str(uuid.uuid4())[1:4],
"name": "db_add_%s" % str(uuid.uuid4())[1:6],
"privileges": [],
"securities": [],
"variables": []
@ -107,7 +104,7 @@ def create_database(connection, db_name):
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
pg_cursor.execute("CREATE DATABASE %s" % db_name)
pg_cursor.execute('''CREATE DATABASE "%s"''' % db_name)
connection.set_isolation_level(old_isolation_level)
connection.commit()
return pg_cursor
@ -115,7 +112,7 @@ def create_database(connection, db_name):
raise Exception("Error while creating database. %s" % exception)
def verify_database(self, server_group, server_id, db_id):
def connect_database(self, server_group, server_id, db_id):
"""
This function verifies that database is exists and whether it connect
successfully or not
@ -150,38 +147,3 @@ def disconnect_database(self, server_id, db_id):
'browser/database/connect/', utils.SERVER_GROUP, server_id, db_id),
follow_redirects=True)
self.assertEquals(db_con.status_code, 200)
def delete_database(tester):
"""
This function used to delete the added databases
:param tester: test client object
:return: None
"""
server_ids = None
db_ids_dict = None
all_id = utils.get_ids()
if "sid" and "did" in all_id.keys():
server_ids = all_id["sid"]
if all_id['did']:
db_ids_dict = all_id['did'][0]
else:
raise Exception("Keys are not found in pickle dict: {}".format(["sid", "did"]))
if server_ids and db_ids_dict is not None:
for server_id in server_ids:
server_response = server_utils.verify_server(tester, utils.SERVER_GROUP, server_id)
if server_response["data"]["connected"]:
db_id = db_ids_dict[int(server_id)]
response = tester.delete(DATABASE_URL + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(db_id),
follow_redirects=True)
assert response.status_code == 200
response_data = json.loads(response.data.decode('utf-8'))
assert response_data['success'] == 1
else:
raise Exception("No servers/databases found.")

View File

@ -10,7 +10,7 @@ import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from . import utils as roles_utils
@ -28,7 +28,7 @@ class LoginRoleAddTestCase(BaseTestGenerator):
def runTest(self):
"""This function test the add role scenario"""
server_id = test_server_dict["server"][0]["server_id"]
server_id = parent_node_dict["server"][-1]["server_id"]
server_response = server_utils.connect_server(self, server_id)
if not server_response['data']['connected']:
raise Exception("Server not found to add the role.")
@ -42,8 +42,9 @@ class LoginRoleAddTestCase(BaseTestGenerator):
self.assertEquals(response.status_code, 200)
response_data = json.loads(response.data.decode('utf-8'))
role_id = response_data['node']['_id']
role_dict = {"server_id": server_id, "role_id": role_id}
utils.write_node_info(role_id, "lrid", role_dict)
role_dict = {"server_id": server_id, "role_id": role_id,
"role_name": self.role_name}
utils.write_node_info("lrid", role_dict)
def tearDown(self):
"""This function delete the role from added server"""

View File

@ -10,7 +10,7 @@ import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from . import utils as roles_utils
@ -24,13 +24,16 @@ class LoginRoleDeleteTestCase(BaseTestGenerator):
def setUp(self):
self.role_name = "role_delete_%s" % str(uuid.uuid4())[1:6]
self.role_id = roles_utils.create_role(self.server, self.role_name)
self.server_id = parent_node_dict["server"][-1]["server_id"]
role_dict = {"server_id": self.server_id, "role_id": self.role_id,
"role_name": self.role_name}
utils.write_node_info("lrid", role_dict)
def runTest(self):
"""This function test the delete role scenario"""
server_id = test_server_dict["server"][0]["server_id"]
response = self.tester.delete(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(self.role_id),
str(self.server_id) + '/' + str(self.role_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)

View File

@ -10,7 +10,7 @@ import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from . import utils as roles_utils
@ -22,15 +22,18 @@ class LoginRoleGetTestCase(BaseTestGenerator):
]
def setUp(self):
self.server_id = parent_node_dict["server"][-1]["server_id"]
self.role_name = "role_get_%s" % str(uuid.uuid4())[1:6]
self.role_id = roles_utils.create_role(self.server, self.role_name)
role_dict = {"server_id": self.server_id, "role_id": self.role_id,
"role_name": self.role_name}
utils.write_node_info("lrid", role_dict)
def runTest(self):
"""This function test the get role scenario"""
server_id = test_server_dict["server"][0]["server_id"]
response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(self.role_id),
str(self.server_id) + '/' + str(self.role_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)

View File

@ -12,7 +12,7 @@ import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from . import utils as roles_utils
@ -24,12 +24,15 @@ class LoginRolePutTestCase(BaseTestGenerator):
]
def setUp(self):
self.server_id = parent_node_dict["server"][-1]["server_id"]
self.role_name = "role_put_%s" % str(uuid.uuid4())[1:6]
self.role_id = roles_utils.create_role(self.server, self.role_name)
role_dict = {"server_id": self.server_id, "role_id": self.role_id,
"role_name": self.role_name}
utils.write_node_info("lrid", role_dict)
def runTest(self):
"""This function tests the update role data scenario"""
server_id = test_server_dict["server"][0]["server_id"]
role_response = roles_utils.verify_role(self.server, self.role_name)
if len(role_response) == 0:
raise Exception("No roles(s) to update!!!")
@ -39,7 +42,7 @@ class LoginRolePutTestCase(BaseTestGenerator):
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(self.role_id),
str(self.server_id) + '/' + str(self.role_id),
data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)

View File

@ -7,14 +7,11 @@
#
# ##################################################################
from __future__ import print_function
import os
import sys
import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from . import utils as tablespace_utils
@ -30,14 +27,14 @@ class TableSpaceAddTestCase(BaseTestGenerator):
self.tablespace_name = ''
if not self.server['tablespace_path']\
or self.server['tablespace_path'] is None:
message = "Skipped tablespace add test case. Tablespace path" \
message = "Tablespace add test case. Tablespace path" \
" not configured for server: %s" % self.server['name']
# Skip the test case if tablespace_path not found.
self.skipTest(message)
def runTest(self):
"""This function test the add tablespace API"""
server_id = test_server_dict["server"][0]["server_id"]
server_id = parent_node_dict["server"][-1]["server_id"]
server_response = server_utils.connect_server(self, server_id)
if not server_response['data']['connected']:
raise Exception("Unable to connect server to get tablespace.")
@ -58,7 +55,7 @@ class TableSpaceAddTestCase(BaseTestGenerator):
tablespace_dict = {"tablespace_id": tablespace_id,
"tablespace_name": self.tablespace_name,
"server_id": server_id}
utils.write_node_info(tablespace_id, "tsid", tablespace_dict)
utils.write_node_info("tsid", tablespace_dict)
def tearDown(self):
"""

View File

@ -12,13 +12,12 @@ import json
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from . import utils as tablespace_utils
class TableSpaceDeleteTestCase(BaseTestGenerator):
"""This class has delete table space scenario"""
scenarios = [
# Fetching default URL for tablespace node.
('Check Tablespace Node', dict(url='/browser/tablespace/obj/'))
@ -27,24 +26,28 @@ class TableSpaceDeleteTestCase(BaseTestGenerator):
def setUp(self):
if not self.server['tablespace_path']\
or self.server['tablespace_path'] is None:
message = "Skipped tablespace delete test case. Tablespace path" \
message = "Tablespace delete test case. Tablespace path" \
" not configured for server: %s" % self.server['name']
# Skip the test case if tablespace_path not found.
self.skipTest(message)
self.tablespace_name = "tablespace_delete_%s" % str(uuid.uuid4())[1:6]
self.server_id = parent_node_dict["server"][-1]["server_id"]
self.tablespace_id = tablespace_utils.create_tablespace(
self.server, self.tablespace_name)
tablespace_dict = {"tablespace_id": self.tablespace_id,
"tablespace_name": self.tablespace_name,
"server_id": self.server_id}
utils.write_node_info("tsid", tablespace_dict)
def runTest(self):
"""This function tests the delete table space api"""
server_id = test_server_dict["server"][0]["server_id"]
tablespace_count = tablespace_utils.verify_table_space(
self.server, self.tablespace_name)
if tablespace_count == 0:
raise Exception("No tablespace(s) to delete!!!")
response = self.tester.delete(self.url + str(utils.SERVER_GROUP)
+ '/' + str(server_id) + '/'
+ '/' + str(self.server_id) + '/'
+ str(self.tablespace_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)

View File

@ -10,7 +10,7 @@ import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.tests import utils as server_utils
from . import utils as tablespace_utils
@ -26,18 +26,22 @@ class TablespaceGetTestCase(BaseTestGenerator):
def setUp(self):
if not self.server['tablespace_path']\
or self.server['tablespace_path'] is None:
message = "Skipped tablespace get test case. Tablespace path" \
message = "Tablespace get test case. Tablespace path" \
" not configured for server: %s" % self.server['name']
# Skip the test case if tablespace_path not found.
self.skipTest(message)
self.tablespace_name = "tablespace_delete_%s" % str(uuid.uuid4())[1:6]
self.tablespace_id = tablespace_utils.create_tablespace(
self.server, self.tablespace_name)
self.server_id = parent_node_dict["server"][-1]["server_id"]
tablespace_dict = {"tablespace_id": self.tablespace_id,
"tablespace_name": self.tablespace_name,
"server_id": self.server_id}
utils.write_node_info("tsid", tablespace_dict)
def runTest(self):
"""This function test the get table space scenario"""
server_id = test_server_dict["server"][0]["server_id"]
server_response = server_utils.connect_server(self, server_id)
server_response = server_utils.connect_server(self, self.server_id)
if not server_response['data']['connected']:
raise Exception("Unable to connect server to get tablespace.")
@ -47,7 +51,7 @@ class TablespaceGetTestCase(BaseTestGenerator):
raise Exception("No tablespace(s) to update!!!")
response = self.tester.get(
self.url + str(utils.SERVER_GROUP) + '/' +
str(server_id) + '/' + str(self.tablespace_id),
str(self.server_id) + '/' + str(self.tablespace_id),
follow_redirects=True)
self.assertEquals(response.status_code, 200)

View File

@ -12,7 +12,7 @@ import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
from . import utils as tablespace_utils
@ -27,17 +27,21 @@ class TableSpaceUpdateTestCase(BaseTestGenerator):
def setUp(self):
if not self.server['tablespace_path']\
or self.server['tablespace_path'] is None:
message = "Skipped tablespace delete test case. Tablespace path" \
message = "Tablespace delete test case. Tablespace path" \
" not configured for server: %s" % self.server['name']
# Skip the test case if tablespace_path not found.
self.skipTest(message)
self.tablespace_name = "tablespace_delete_%s" % str(uuid.uuid4())[1:6]
self.tablespace_id = tablespace_utils.create_tablespace(
self.server, self.tablespace_name)
self.server_id = parent_node_dict["server"][-1]["server_id"]
tablespace_dict = {"tablespace_id": self.tablespace_id,
"tablespace_name": self.tablespace_name,
"server_id": self.server_id}
utils.write_node_info("tsid", tablespace_dict)
def runTest(self):
"""This function tests the update tablespace data scenario"""
server_id = test_server_dict["server"][0]["server_id"]
tablespace_count = tablespace_utils.verify_table_space(
self.server, self.tablespace_name)
if tablespace_count == 0:
@ -48,7 +52,7 @@ class TableSpaceUpdateTestCase(BaseTestGenerator):
"table_space_id": self.tablespace_id
}
put_response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' + str(server_id) + '/'
self.url + str(utils.SERVER_GROUP) + '/' + str(self.server_id) + '/'
+ str(self.tablespace_id), data=json.dumps(data),
follow_redirects=True)
self.assertEquals(put_response.status_code, 200)

View File

@ -32,8 +32,9 @@ class ServersAddTestCase(BaseTestGenerator):
self.assertEquals(response.status_code, 200)
response_data = json.loads(response.data.decode('utf-8'))
self.server_id = response_data['node']['_id']
utils.write_node_info(int(self.server_id), "sid", self.server)
server_dict = {"server_id": int(self.server_id)}
utils.write_node_info("sid", server_dict)
def tearDown(self):
"""This function delete the server from SQLite """
utils.delete_server(self.tester, self.server_id)
utils.delete_server_with_api(self.tester, self.server_id)

View File

@ -21,6 +21,8 @@ class ServerDeleteTestCase(BaseTestGenerator):
def setUp(self):
"""This function add the server to test the DELETE API"""
self.server_id = utils.create_server(self.server)
server_dict = {"server_id": self.server_id}
utils.write_node_info("sid", server_dict)
def runTest(self):
"""This function deletes the added server"""
@ -33,4 +35,4 @@ class ServerDeleteTestCase(BaseTestGenerator):
def tearDown(self):
"""This function delete the server from SQLite """
utils.delete_server(self.tester, self.server_id)
utils.delete_server_with_api(self.tester, self.server_id)

View File

@ -9,7 +9,7 @@
from pgadmin.utils.route import BaseTestGenerator
from regression import test_utils as utils
from regression import test_server_dict
from regression import parent_node_dict
class ServersGetTestCase(BaseTestGenerator):
@ -23,9 +23,15 @@ class ServersGetTestCase(BaseTestGenerator):
('Default Server Node url', dict(url='/browser/server/obj/'))
]
def setUp(self):
"""This function add the server to test the GET API"""
self.server_id = utils.create_server(self.server)
server_dict = {"server_id": self.server_id}
utils.write_node_info("sid", server_dict)
def runTest(self):
""" This function will fetch the added servers to object browser. """
server_id = test_server_dict["server"][0]["server_id"]
server_id = parent_node_dict["server"][-1]["server_id"]
if not server_id:
raise Exception("Server not found to test GET API")
response = self.tester.get(self.url + str(utils.SERVER_GROUP) + '/' +
@ -33,3 +39,6 @@ class ServersGetTestCase(BaseTestGenerator):
follow_redirects=True)
self.assertEquals(response.status_code, 200)
def tearDown(self):
"""This function delete the server from SQLite """
utils.delete_server_with_api(self.tester, self.server_id)

View File

@ -23,6 +23,8 @@ class ServerUpdateTestCase(BaseTestGenerator):
def setUp(self):
"""This function add the server to test the PUT API"""
self.server_id = utils.create_server(self.server)
server_dict = {"server_id": self.server_id}
utils.write_node_info("sid", server_dict)
def runTest(self):
"""This function update the server details"""
@ -37,4 +39,4 @@ class ServerUpdateTestCase(BaseTestGenerator):
def tearDown(self):
"""This function delete the server from SQLite"""
utils.delete_server(self.tester, self.server_id)
utils.delete_server_with_api(self.tester, self.server_id)

View File

@ -6,14 +6,13 @@
# #This software is released under the PostgreSQL Licence
#
# ##########################################################################
import uuid
import json
from pgadmin.utils.route import BaseTestGenerator
from regression.test_setup import config_data
from regression import test_utils as utils
from utils import change_password
from regression import test_utils
from . import utils
class ChangePasswordTestCase(BaseTestGenerator):
@ -89,30 +88,25 @@ class ChangePasswordTestCase(BaseTestGenerator):
confirmPassword=self.password, active=1, role="2"),
follow_redirects=True)
user_id = json.loads(response.data.decode('utf-8'))['id']
# Logout the Administrator before login normal user
utils.logout_tester_account(self.tester)
test_utils.logout_tester_account(self.tester)
response = self.tester.post('/login', data=dict(
email=self.username, password=self.password),
follow_redirects=True)
assert response.status_code == 200
self.assertEquals(response.status_code, 200)
# test the 'change password' test case
change_password(self)
utils.change_password(self)
# Delete the normal user after changing it's password
utils.logout_tester_account(self.tester)
test_utils.logout_tester_account(self.tester)
# Login the Administrator before deleting normal user
utils.login_tester_account(self.tester)
test_utils.login_tester_account(self.tester)
response = self.tester.delete(
'/user_management/user/' + str(user_id),
follow_redirects=True)
assert response.status_code == 200
self.assertEquals(response.status_code, 200)
else:
change_password(self)
utils.change_password(self)
@classmethod
def tearDownClass(cls):
utils.login_tester_account(cls.tester)
test_utils.login_tester_account(cls.tester)

View File

@ -85,7 +85,7 @@ class LoginTestCase(BaseTestGenerator):
response = self.tester.post('/login', data=dict(
email=self.email, password=self.password),
follow_redirects=True)
self.assertIn(self.respdata, response.data.decode('utf8'))
self.assertTrue(self.respdata in response.data.decode('utf8'))
@classmethod
def tearDownClass(cls):

View File

@ -32,7 +32,7 @@ class LogoutTest(BaseTestGenerator):
"""This function checks the logout functionality."""
response = self.tester.get('/logout')
self.assertIn(self.respdata, response.data.decode('utf8'))
self.assertTrue(self.respdata in response.data.decode('utf8'))
@classmethod
def tearDownClass(cls):

View File

@ -6,12 +6,12 @@
# #This software is released under the PostgreSQL Licence
#
# ##########################################################################
import uuid
from pgadmin.utils.route import BaseTestGenerator
from regression.test_setup import config_data
from test_utils import login_tester_account, logout_tester_account
from regression.test_utils import login_tester_account
from regression.test_utils import logout_tester_account
class ResetPasswordTestCase(BaseTestGenerator):
@ -45,12 +45,12 @@ class ResetPasswordTestCase(BaseTestGenerator):
"""This function checks reset password functionality."""
response = self.tester.get('/reset')
self.assertIn('Recover pgAdmin 4 Password', response.data.decode(
self.assertTrue('Recover pgAdmin 4 Password' in response.data.decode(
'utf-8'))
response = self.tester.post(
'/reset', data=dict(email=self.email),
follow_redirects=True)
self.assertIn(self.respdata, response.data.decode('utf-8'))
self.assertTrue(self.respdata in response.data.decode('utf-8'))
@classmethod
def tearDownClass(cls):

View File

@ -10,7 +10,7 @@
def change_password(self):
response = self.tester.get('/change', follow_redirects=True)
self.assertIn('pgAdmin 4 Password Change', response.data.decode(
self.assertTrue('pgAdmin 4 Password Change' in response.data.decode(
'utf-8'))
response = self.tester.post('/change', data=dict(
@ -18,4 +18,4 @@ def change_password(self):
new_password=self.new_password,
new_password_confirm=self.new_password_confirm),
follow_redirects=True)
self.assertIn(self.respdata, response.data.decode('utf-8'))
self.assertTrue(self.respdata in response.data.decode('utf-8'))

View File

@ -7,7 +7,11 @@
#
##############################################################
import traceback
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
from abc import ABCMeta, abstractmethod
@ -70,7 +74,7 @@ class TestsGeneratorRegistry(ABCMeta):
if "tests." in str(module_name):
cls.import_app_modules(module_name)
except ImportError:
pass
traceback.print_exc(file=sys.stderr)
else:
for module_name in find_modules(pkg, False, True):
try:
@ -79,7 +83,8 @@ class TestsGeneratorRegistry(ABCMeta):
if "pgadmin.browser.tests" not in module_name:
cls.import_app_modules(module_name)
except ImportError:
pass
traceback.print_exc(file=sys.stderr)
import six

View File

@ -9,6 +9,11 @@
import pgadmin.browser.server_groups.servers.roles.tests.utils as roles_utils
import pgadmin.browser.server_groups.servers.tablespaces.tests.utils as \
tablespace_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import\
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.schemas.functions.tests\
import utils as trigger_funcs_utils
global node_info_dict
node_info_dict = {
@ -16,22 +21,14 @@ node_info_dict = {
"did": [], # database
"lrid": [], # role
"tsid": [], # tablespace
"scid": [], # schema
"tfnid": [], # trigger functions
"coid": [], # collation
"cid": [], # casts
"etid": [], # event_trigger
"eid": [], # extension
"fid": [], # FDW
"fsid": [], # FRS
"umid": [], # user_mapping
"seid": [] # sequence
"scid": [] # schema
}
global test_server_dict
test_server_dict = {
global parent_node_dict
parent_node_dict = {
"server": [],
"database": [],
"tablespace": [],
"role": []
"role": [],
"schema": []
}

View File

@ -10,14 +10,18 @@
""" This file collect all modules/files present in tests directory and add
them to TestSuite. """
from __future__ import print_function
import argparse
import os
import sys
import signal
import atexit
import unittest
import logging
import traceback
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
logger = logging.getLogger(__name__)
file_name = os.path.basename(__file__)
@ -35,7 +39,7 @@ if sys.path[0] != root:
from pgadmin import create_app
import config
import test_setup
from regression import test_setup
# Delete SQLite db file if exists
if os.path.isfile(config.TEST_SQLITE_PATH):
@ -66,7 +70,7 @@ exec (open("setup.py").read())
from pgadmin.model import SCHEMA_VERSION
# Delay the import test_utils as it needs updated config.SQLITE_PATH
import test_utils
from regression import test_utils
config.SETTINGS_SCHEMA_VERSION = SCHEMA_VERSION
@ -189,11 +193,8 @@ def get_tests_result(test_suite):
if class_name not in failed_cases_result:
skipped_cases_result.append(class_name)
return total_ran, failed_cases_result, skipped_cases_result
except Exception as exception:
exception = "Exception: %s: line:%s %s" % (
file_name, sys.exc_traceback.tb_lineno, exception)
print(exception)
logger.exception(exception)
except Exception:
traceback.print_exc(file=sys.stderr)
class StreamToLogger(object):
@ -248,12 +249,15 @@ if __name__ == '__main__':
test_utils.login_tester_account(test_client)
servers_info = test_utils.get_config_data()
node_name = "all"
if args['pkg'] is not None:
node_name = args['pkg'].split('.')[-1]
try:
for server in servers_info:
print("\n=============Running the test cases for '%s'============="
% server['name'], file=sys.stderr)
# Create test server
test_utils.create_test_server(server)
test_utils.create_parent_server_node(server, node_name)
suite = get_suite(test_module_list, server, test_client)
tests = unittest.TextTestRunner(stream=sys.stderr,
@ -265,14 +269,16 @@ if __name__ == '__main__':
test_result[server['name']] = [ran_tests, failed_cases,
skipped_cases]
# Delete test server
test_utils.delete_test_server(test_client)
# test_utils.delete_test_server(test_client)
except SystemExit:
drop_objects()
print("\n======================================================================", file=sys.stderr)
print("\n==============================================================="
"=======", file=sys.stderr)
print("Test Result Summary", file=sys.stderr)
print("======================================================================\n", file=sys.stderr)
print(
"==================================================================="
"===\n", file=sys.stderr)
for server_res in test_result:
failed_cases = "\n\t\t".join(test_result[server_res][1])
skipped_cases = "\n\t\t".join(test_result[server_res][2])
@ -292,6 +298,8 @@ if __name__ == '__main__':
(total_skipped != 0 and ":\n\t\t" or ""), skipped_cases),
file=sys.stderr)
print("======================================================================\n", file=sys.stderr)
print(
"==================================================================="
"===\n", file=sys.stderr)
print("Please check output in file: %s/regression.log\n" % CURRENT_PATH)

View File

@ -7,7 +7,7 @@
#
# ##################################################################
from __future__ import print_function
import traceback
import os
import sys
import uuid
@ -20,7 +20,7 @@ import test_setup
import regression
SERVER_GROUP = test_setup.config_data['server_group']
file_name = os.path.basename(__file__)
file_name = os.path.realpath(__file__)
def get_db_connection(db, username, password, host, port):
@ -83,11 +83,9 @@ def get_config_data():
return server_data
def write_node_info(node_id, key, node_info=None):
def write_node_info(key, node_info=None):
    """
    Register *node_info* under *key* in ``regression.node_info_dict``,
    skipping entries that are already recorded.

    :param key: dict key name to store node info (e.g. "sid", "did")
    :type key: str
    :param node_info: node details
    :type node_info: dict
    :return: None
    """
    bucket = regression.node_info_dict[key]
    # Avoid duplicate bookkeeping entries for the same node.
    if node_info not in bucket:
        bucket.append(node_info)
def clear_node_info_dict():
@ -123,48 +116,52 @@ def create_database(server, db_name):
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
pg_cursor.execute("CREATE DATABASE %s" % db_name)
pg_cursor.execute('''CREATE DATABASE "%s"''' % db_name)
connection.set_isolation_level(old_isolation_level)
connection.commit()
# Get 'oid' from newly created database
pg_cursor.execute(
"SELECT db.oid from pg_database db WHERE db.datname='%s'" %
db_name)
pg_cursor.execute("SELECT db.oid from pg_database db WHERE"
" db.datname='%s'" % db_name)
oid = pg_cursor.fetchone()
db_id = ''
if oid:
db_id = oid[0]
connection.close()
return db_id
except Exception as exception:
exception = "Error while creating database: %s: line:%s %s" % (
file_name, sys.exc_traceback.tb_lineno, exception)
print(exception, file=sys.stderr)
except Exception:
traceback.print_exc(file=sys.stderr)
def drop_database(connection, database_name):
    """Drop *database_name* using *connection*.

    Built-in databases (postgres/template0/template1) are never dropped.
    Any backend still attached to the database is terminated first so the
    DROP cannot fail with "database is in use".
    """
    # Guard: never touch the default databases.
    if database_name in ["postgres", "template1", "template0"]:
        return
    pg_cursor = connection.cursor()
    pg_cursor.execute(
        "SELECT * FROM pg_database db WHERE db.datname='%s'" % database_name)
    if not pg_cursor.fetchall():
        return
    # Release pid if any process using database
    pg_cursor.execute(
        "select pg_terminate_backend(pid) from"
        " pg_stat_activity where datname='%s'" % database_name)
    # DROP DATABASE cannot run inside a transaction block -> autocommit.
    old_isolation_level = connection.isolation_level
    connection.set_isolation_level(0)
    pg_cursor.execute('''DROP DATABASE "%s"''' % database_name)
    connection.set_isolation_level(old_isolation_level)
    connection.commit()
    connection.close()
def drop_tablespace(connection):
    """Drop every non-default tablespace reachable over *connection*.

    ``pg_default`` and ``pg_global`` are always skipped.  The tablespace
    name is double-quoted in the DROP statement so names that need quoting
    (mixed case, spaces) drop cleanly — consistent with the quoting used
    when dropping databases.
    """
    pg_cursor = connection.cursor()
    pg_cursor.execute("SELECT * FROM pg_tablespace")
    table_spaces = pg_cursor.fetchall()
    if table_spaces:
        for table_space in table_spaces:
            if table_space[0] not in ["pg_default", "pg_global"]:
                # DROP TABLESPACE cannot run inside a transaction block,
                # so switch to autocommit (isolation level 0) for the DROP.
                old_isolation_level = connection.isolation_level
                connection.set_isolation_level(0)
                pg_cursor.execute('DROP TABLESPACE "%s"' % table_space[0])
                connection.set_isolation_level(old_isolation_level)
                connection.commit()
    connection.close()
def create_server(server):
@ -181,73 +178,94 @@ def create_server(server):
' comment) VALUES (?,?,?,?,?,?,?,?,?,?)', server_details)
server_id = cur.lastrowid
conn.commit()
# Add server info to parent_node_dict
regression.parent_node_dict["server"].append({"server_id": server_id,
"server": server})
return server_id
except Exception as exception:
raise Exception("Error while creating server. %s" % exception)
def delete_server(tester, sid):
def delete_server_with_api(tester, sid):
    """Delete the server *sid* through the REST API.

    :param tester: flask test client used to issue the request
    :param sid: server id
    :type sid: int
    :return: the API response object, or None if the request itself raised
    """
    response = None
    try:
        url = '/browser/server/obj/' + str(SERVER_GROUP) + "/"
        # Call API to delete the server
        response = tester.delete(url + str(sid))
    except Exception:
        # Best-effort cleanup helper: log the failure, don't abort the suite.
        traceback.print_exc(file=sys.stderr)
    # Returning the response lets callers assert on the status code;
    # existing callers that ignore the return value are unaffected.
    return response
def delete_server_from_sqlite(sid):
    """Remove the server row with id *sid* from the SQLite settings DB.

    Uses parameterized SQL (qmark style) instead of ``%`` interpolation,
    and always closes the connection, even when the query fails.

    :param sid: server id
    :type sid: int
    """
    try:
        con = sqlite3.connect(config.SQLITE_PATH)
        try:
            cur = con.cursor()
            servers = cur.execute('SELECT * FROM server WHERE id=?',
                                  (sid,)).fetchall()
            if servers:
                cur.execute('DELETE FROM server WHERE id=?', (sid,))
                con.commit()
        finally:
            con.close()
    except Exception:
        # Best-effort cleanup helper: log the failure, don't abort the suite.
        traceback.print_exc(file=sys.stderr)
def add_db_to_parent_node_dict(srv_id, db_id, test_db_name):
    """Record a created test database under its server in parent_node_dict."""
    entry = {"server_id": srv_id, "db_id": db_id, "db_name": test_db_name}
    regression.parent_node_dict["database"].append(entry)
def create_test_server(server_info):
def add_schema_to_parent_node_dict(srv_id, db_id, schema_id, schema_name):
    """Record a created test schema (with its db/server) in parent_node_dict."""
    entry = {"server_id": srv_id,
             "db_id": db_id,
             "schema_id": schema_id,
             "schema_name": schema_name}
    regression.parent_node_dict["schema"].append(entry)
def create_parent_server_node(server_info, node_name):
    """
    Create the test server which acts as the parent server; depending on the
    node under test, also create the test database and schema it hangs off.

    :param server_info: server details
    :type server_info: dict
    :param node_name: name of the node package being tested
    :type node_name: str
    :return: None
    """
    # Create the server
    srv_id = create_server(server_info)
    if node_name == "databases":
        # Database tests only need a test database, no schema.
        test_db_name = "test_db_%s" % str(uuid.uuid4())[1:6]
        db_id = create_database(server_info, test_db_name)
        add_db_to_parent_node_dict(srv_id, db_id, test_db_name)
    elif node_name not in ["servers", "roles", "tablespaces", "browser"]:
        # Every remaining node (including "schemas") lives under a schema;
        # the former separate "schemas" branch duplicated this code verbatim.
        _create_test_db_and_schema(server_info, srv_id)


def _create_test_db_and_schema(server_info, srv_id):
    """Create a test database plus a test schema in it and register both."""
    test_db_name = "test_db_%s" % str(uuid.uuid4())[1:6]
    db_id = create_database(server_info, test_db_name)
    add_db_to_parent_node_dict(srv_id, db_id, test_db_name)
    # Create schema
    schema_name = "test_schema_%s" % str(uuid.uuid4())[1:6]
    connection = get_db_connection(test_db_name,
                                   server_info['username'],
                                   server_info['db_password'],
                                   server_info['host'],
                                   server_info['port'])
    schema = regression.schema_utils.create_schema(connection, schema_name)
    add_schema_to_parent_node_dict(srv_id, db_id, schema[0], schema[1])
def delete_test_server(tester):
test_server_dict = regression.test_server_dict
test_servers = test_server_dict["server"]
test_databases = test_server_dict["database"]
test_table_spaces = test_server_dict["tablespace"]
try:
parent_node_dict = regression.parent_node_dict
test_servers = parent_node_dict["server"]
test_databases = parent_node_dict["database"]
test_roles = regression.node_info_dict["lrid"]
test_table_spaces = regression.node_info_dict["tsid"]
for test_server in test_servers:
srv_id = test_server["server_id"]
servers_dict = test_server["server"]
@ -260,16 +278,64 @@ def delete_test_server(tester):
database_name = database["db_name"]
# Drop database
drop_database(connection, database_name)
for role in test_roles:
connection = get_db_connection(servers_dict['db'],
servers_dict['username'],
servers_dict['db_password'],
servers_dict['host'],
servers_dict['port'])
# Delete role
regression.roles_utils.delete_role(connection,
role["role_name"])
for tablespace in test_table_spaces:
connection = get_db_connection(servers_dict['db'],
servers_dict['username'],
servers_dict['db_password'],
servers_dict['host'],
servers_dict['port'])
# Delete tablespace
regression.tablespace_utils.delete_tablespace(
connection, tablespace["tablespace_name"])
# Delete server
delete_server(tester, srv_id)
except Exception as exception:
exception = "Exception: %s: line:%s %s" % (
file_name, sys.exc_traceback.tb_lineno, exception)
print(exception, file=sys.stderr)
delete_server_with_api(tester, srv_id)
except Exception:
traceback.print_exc(file=sys.stderr)
raise
# Clear test_server_dict
for item in regression.test_server_dict:
del regression.test_server_dict[item][:]
def get_db_password(config_servers, name, host, db_port):
    """Return the configured password for the server identified by
    (name, host, db_port); '' when no matching entry exists.

    If several entries match, the last one wins (full scan, no early exit).
    """
    password = ''
    target = (name, host, db_port)
    for entry in config_servers:
        if (entry['name'], entry['host'], entry['db_port']) == target:
            password = entry['db_password']
    return password
def get_db_server(sid):
    """Open a DB connection to the server stored under *sid* in the SQLite
    settings DB.

    :param sid: server id in the SQLite ``server`` table
    :type sid: int
    :return: an open connection, or '' when the server row is missing or no
        password is configured for it in the test config
    """
    connection = ''
    conn = sqlite3.connect(config.SQLITE_PATH)
    cur = conn.cursor()
    # Parameterized query -- avoids interpolating sid into the SQL text.
    server = cur.execute('SELECT name, host, port, maintenance_db,'
                         ' username FROM server WHERE id=?',
                         (sid,)).fetchone()
    if server:
        name, host, db_port, db_name, username = server
        config_servers = test_setup.config_data['server_credentials']
        # Get the db password from config file for appropriate server
        db_password = get_db_password(config_servers, name, host, db_port)
        if db_password:
            connection = get_db_connection(db_name,
                                           username,
                                           db_password,
                                           host,
                                           db_port)
    conn.close()
    return connection
def remove_db_file():
def _drop_objects(tester):
    """Clean up every object (databases, tablespaces, roles, servers)
    created during the test suite run, then log the test client out."""
    try:
        servers = regression.parent_node_dict["server"] + \
            regression.node_info_dict["sid"]
        databases = regression.parent_node_dict["database"] + \
            regression.node_info_dict["did"]
        tablespaces = regression.parent_node_dict["tablespace"] + \
            regression.node_info_dict["tsid"]
        roles = regression.parent_node_dict["role"] + \
            regression.node_info_dict["lrid"]
        # Drop databases
        for database in databases:
            connection = get_db_server(database["server_id"])
            if connection:
                drop_database(connection, database["db_name"])
        # Delete table spaces
        for tablespace in tablespaces:
            connection = get_db_server(tablespace["server_id"])
            if connection:
                regression.tablespace_utils.delete_tablespace(
                    connection, tablespace["tablespace_name"])
        # Delete roles
        for role in roles:
            connection = get_db_server(role["server_id"])
            if connection:
                regression.roles_utils.delete_role(connection,
                                                   role["role_name"])
        # Delete the servers themselves via the REST API
        for server in servers:
            delete_server_with_api(tester, server["server_id"])
    except Exception:
        traceback.print_exc(file=sys.stderr)
    finally:
        # Always log the test client out, even if cleanup failed.
        logout_tester_account(tester)