Added support for psycopg3 along with psycopg2. #5011

Authored by Khushboo Vashi on 2023-02-15 11:31:29 +05:30; committed by Akshay Joshi
parent 7a4951f211
commit 5e0daccf76
635 changed files with 6500 additions and 1534 deletions

View File

@ -26,6 +26,7 @@ New features
Housekeeping
************
| `Issue #5011 <https://github.com/pgadmin-org/pgadmin4/issues/5011>`_ - Added support for psycopg3 along with psycopg2.
| `Issue #5701 <https://github.com/pgadmin-org/pgadmin4/issues/5701>`_ - Remove Bootstrap and jQuery usage.
Bug fixes

View File

@ -60,7 +60,7 @@ _create_python_virtualenv() {
pip3 install wheel
# Install the requirements
pip3 install --no-cache-dir --no-binary psycopg2 -r "${SOURCEDIR}/requirements.txt"
pip3 install --no-cache-dir --no-binary psycopg -r "${SOURCEDIR}/requirements.txt"
# Fixup the paths in the venv activation scripts
sed -i 's/VIRTUAL_ENV=.*/VIRTUAL_ENV="\/usr\/pgadmin4\/venv"/g' venv/bin/activate
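
Note: the --no-binary option here mirrors the previous psycopg2 behaviour, so the driver is built locally rather than installed as a pre-built wheel, presumably so it links against the bundled libpq. An illustrative way to confirm which psycopg 3 implementation the venv ended up with (run with the venv's Python):

    import psycopg

    print(psycopg.__version__)     # e.g. 3.1.x
    print(psycopg.pq.__impl__)     # "c" when the C implementation was built locally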

View File

@ -36,11 +36,11 @@ with open(req_file, 'r') as req_lines:
requires = []
kerberos_extras = []
# Ensure the Wheel will use psycopg2-binary, not the source distro, and stick
# Ensure the Wheel will use psycopg-binary, not the source distro, and stick
# gssapi in it's own list
for index, req in enumerate(all_requires):
if 'psycopg2' in req:
req = req.replace('psycopg2', 'psycopg2-binary')
if 'psycopg' in req:
req = req.replace('psycopg', 'psycopg-binary')
if 'gssapi' in req:
kerberos_extras.append(req)

View File

@ -199,7 +199,7 @@ begin
else
begin
// Suppose system is running a 32-bit version of Windows then no need to check HKLM64 in RegQueryStringValue
// So IsWin64 - will make sure its should only execute on 64-bit veersion of windows.
// So IsWin64 - will make sure its should only execute on 64-bit version of windows.
if IsWin64 then
begin
// Check if pgAdmin 64 bit is already installed

View File

@ -29,7 +29,8 @@ pytz==2021.*
speaklater3==1.*
sqlparse==0.*
psutil==5.9.3
psycopg2==2.9.*
psycopg2==2.9.*; python_version < '3.7'
psycopg[c]==3.1.*; python_version >= '3.7'
python-dateutil==2.*
SQLAlchemy==1.4.44; python_version <= '3.6'
SQLAlchemy==1.4.*; python_version >= '3.7'
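
The requirement pins now use environment markers: Python < 3.7 keeps psycopg2, while Python >= 3.7 installs psycopg 3 with its C implementation (the [c] extra). An illustrative import guard following the same split (it assumes the matching driver is installed for the running interpreter):

    import sys

    if sys.version_info >= (3, 7):
        import psycopg as driver     # psycopg 3
    else:
        import psycopg2 as driver

    print(driver.__name__)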

View File

@ -78,6 +78,12 @@ function startDesktopMode() {
let updated_path = process.env[key] + ':/usr/local/bin';
process.env[key] = updated_path;
}
if (platform() === 'win32' && (key === 'PATH' || key == 'Path')) {
let _libpq_path = path.join(path.dirname(path.dirname(path.resolve(pgadminFile))), 'runtime');
process.env[key] = _libpq_path + ';' + process.env[key];
}
misc.writeServerLog(' - ' + key + ': ' + process.env[key]);
});
misc.writeServerLog('--------------------------------------------------------\n');
@ -674,4 +680,4 @@ function refreshMenuItems(menu) {
}
});
}
}

View File

@ -303,7 +303,10 @@ LOG_ROTATION_MAX_LOG_FILES = 90 # Maximum number of backups to retain
##########################################################################
# The default driver used for making connection with PostgreSQL
PG_DEFAULT_DRIVER = 'psycopg2'
if sys.version_info < (3, 7):
PG_DEFAULT_DRIVER = 'psycopg2'
else:
PG_DEFAULT_DRIVER = 'psycopg3'
# Maximum allowed idle time in minutes before which releasing the connection
# for the particular session. (in minutes)
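
Since PG_DEFAULT_DRIVER now depends on the interpreter version, code that spells out the driver package name has to follow it; this is why the mocked dotted paths in the test JSON files below change from pgadmin.utils.driver.psycopg2 to pgadmin.utils.driver.psycopg3. A hypothetical helper (not part of the commit) that derives such a path from the configuration instead of hard-coding it:

    import config   # pgAdmin's config module; assumed importable in this context

    def mock_target(method):
        # e.g. "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict" on Python >= 3.7
        return ("pgadmin.utils.driver." + config.PG_DEFAULT_DRIVER +
                ".connection.Connection." + method)

    print(mock_target("execute_dict"))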

View File

@ -11,7 +11,6 @@
a webserver, this will provide the WSGI interface, otherwise, we're going
to start a web server."""
import sys
if sys.version_info < (3, 4):
@ -35,6 +34,17 @@ if 'PGADMIN_SERVER_MODE' in os.environ:
else:
builtins.SERVER_MODE = None
if (3, 10) > sys.version_info > (3, 8) and os.name == 'posix':
# Fix eventlet issue with Python 3.9.
# Ref: https://github.com/eventlet/eventlet/issues/670
# This was causing issue in psycopg3
from eventlet import hubs
hubs.use_hub("poll")
# Ref: https://github.com/miguelgrinberg/python-socketio/issues/567
# Resolve BigAnimal API issue
import selectors
selectors.DefaultSelector = selectors.PollSelector
import config
import setup
from pgadmin import create_app, socketio
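
The added block pins eventlet to the poll hub and swaps the default selector on POSIX systems running Python 3.9, because the epoll-based defaults were problematic with psycopg 3 (see the linked eventlet and python-socketio issues). A self-contained sketch of the same guarded workaround, shown here only for context:

    import os
    import sys
    import selectors

    if (3, 10) > sys.version_info > (3, 8) and os.name == "posix":
        from eventlet import hubs
        hubs.use_hub("poll")                                   # eventlet issue #670
        selectors.DefaultSelector = selectors.PollSelector     # python-socketio issue #567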

View File

@ -28,7 +28,6 @@ from pgadmin.utils.driver import get_driver
from pgadmin.utils.master_password import get_crypt_key
from pgadmin.utils.exception import CryptKeyMissing
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from psycopg2 import Error as psycopg2_Error, OperationalError
from pgadmin.browser.server_groups.servers.utils import is_valid_ipaddress
from pgadmin.utils.constants import UNAUTH_REQ, MIMETYPE_APP_JS, \
SERVER_CONNECTION_CLOSED
@ -242,7 +241,7 @@ class ServerModule(sg.ServerGroupPluginModule):
except CryptKeyMissing:
# show the nodes at least even if not able to connect.
pass
except psycopg2_Error as e:
except Exception as e:
current_app.logger.exception(e)
errmsg = str(e)
@ -1451,8 +1450,6 @@ class ServerNode(PGChildNodeView):
tunnel_password=tunnel_password,
server_types=ServerType.types()
)
except OperationalError as e:
return internal_server_error(errormsg=str(e))
except Exception as e:
current_app.logger.exception(e)
return self.get_response_for_password(
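
Because either driver may now be in use, catching psycopg2-specific classes (psycopg2.Error, OperationalError) would miss errors raised through psycopg 3 connections, so these handlers fall back to the generic Exception. A hypothetical, narrower alternative (not what the commit does) would resolve a driver-agnostic error class once and catch that instead:

    try:
        from psycopg import Error as DriverError     # psycopg 3
    except ImportError:
        from psycopg2 import Error as DriverError    # psycopg 2

    print(DriverError)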

View File

@ -473,7 +473,7 @@ class DatabaseView(PGChildNodeView):
SQL = render_template(
"/".join([self.template_path, self._PROPERTIES_SQL]),
did=did, conn=self.conn, last_system_oid=0,
show_system_objects=self.blueprint.show_system_objects,
show_system_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(SQL)

View File

@ -348,7 +348,8 @@ class CastView(PGChildNodeView, SchemaDiffObjectCompare):
"/".join([self.template_path, self._PROPERTIES_SQL]),
cid=cid,
datlastsysoid=last_system_oid,
showsysobj=self.blueprint.show_system_objects
showsysobj=self.blueprint.show_system_objects,
conn=self.conn
)
status, res = self.conn.execute_dict(sql)
@ -408,7 +409,8 @@ class CastView(PGChildNodeView, SchemaDiffObjectCompare):
srctyp=data['srctyp'],
trgtyp=data['trgtyp'],
datlastsysoid=last_system_oid,
showsysobj=self.blueprint.show_system_objects
showsysobj=self.blueprint.show_system_objects,
conn=self.conn
)
status, cid = self.conn.execute_scalar(sql)
if not status:
@ -584,7 +586,8 @@ class CastView(PGChildNodeView, SchemaDiffObjectCompare):
"/".join([self.template_path, self._PROPERTIES_SQL]),
cid=cid,
datlastsysoid=last_system_oid,
showsysobj=self.blueprint.show_system_objects
showsysobj=self.blueprint.show_system_objects,
conn=self.conn
)
status, res = self.conn.execute_dict(sql)
@ -599,7 +602,7 @@ class CastView(PGChildNodeView, SchemaDiffObjectCompare):
old_data = res['rows'][0]
sql = render_template(
"/".join([self.template_path, self._UPDATE_SQL]),
data=data, o_data=old_data
data=data, o_data=old_data, conn=self.conn
)
return sql, data['name'] if 'name' in data else old_data['name']
else:
@ -630,7 +633,8 @@ class CastView(PGChildNodeView, SchemaDiffObjectCompare):
sql = render_template("/".join([self.template_path,
self._FUNCTIONS_SQL]),
srctyp=data['srctyp'],
trgtyp=data['trgtyp'])
trgtyp=data['trgtyp'],
conn=self.conn)
status, rset = self.conn.execute_dict(sql)
if not status:

View File

@ -11,5 +11,5 @@ CREATE CAST ({{ conn|qtTypeIdent(data.srctyp) }} AS {{ conn|qtTypeIdent(data.trg
{# Description for CAST #}
{% if data.description %}
COMMENT ON CAST ({{ conn|qtTypeIdent(data.srctyp) }} AS {{ conn|qtTypeIdent(data.trgtyp) }})
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif %}{% endif %}
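
The qtLiteral filter now receives the connection because, with psycopg 3, a Python value is quoted as a SQL literal through the connection (its sql module needs the connection for encoding-aware escaping), whereas psycopg2 could quote without one. An illustrative psycopg 3 equivalent (the DSN is an assumption; adjust as needed):

    import psycopg
    from psycopg import sql

    with psycopg.connect("dbname=postgres") as conn:
        print(sql.Literal("O'Reilly").as_string(conn))   # -> 'O''Reilly'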

View File

@ -6,8 +6,8 @@ SELECT
FROM
pg_catalog.pg_proc p JOIN pg_catalog.pg_namespace n ON n.oid=p.pronamespace
WHERE
proargtypes[0] = (SELECT t.oid FROM pg_catalog.pg_type t WHERE pg_catalog.format_type(t.oid, NULL) = {{srctyp|qtLiteral}})
AND prorettype = (SELECT t.oid FROM pg_catalog.pg_type t WHERE pg_catalog.format_type(t.oid, NULL) = {{trgtyp|qtLiteral}})
proargtypes[0] = (SELECT t.oid FROM pg_catalog.pg_type t WHERE pg_catalog.format_type(t.oid, NULL) = {{srctyp|qtLiteral(conn)}})
AND prorettype = (SELECT t.oid FROM pg_catalog.pg_type t WHERE pg_catalog.format_type(t.oid, NULL) = {{trgtyp|qtLiteral(conn)}})
AND CASE
WHEN pg_catalog.array_length(proargtypes,1) = 2 THEN
proargtypes[1] = 23

View File

@ -3,8 +3,8 @@
SELECT
ca.oid
FROM pg_catalog.pg_cast ca
WHERE ca.castsource = (SELECT t.oid FROM pg_catalog.pg_type t WHERE pg_catalog.format_type(t.oid, NULL) = {{srctyp|qtLiteral}})
AND ca.casttarget = (SELECT t.oid FROM pg_catalog.pg_type t WHERE pg_catalog.format_type(t.oid, NULL) = {{trgtyp|qtLiteral}})
WHERE ca.castsource = (SELECT t.oid FROM pg_catalog.pg_type t WHERE pg_catalog.format_type(t.oid, NULL) = {{srctyp|qtLiteral(conn)}})
AND ca.casttarget = (SELECT t.oid FROM pg_catalog.pg_type t WHERE pg_catalog.format_type(t.oid, NULL) = {{trgtyp|qtLiteral(conn)}})
{% if datlastsysoid %}
AND ca.oid > {{datlastsysoid}}::OID
{% endif %}

View File

@ -2,5 +2,5 @@
{% if data and 'description' in data and data.description != o_data.description %}
COMMENT ON CAST ({{ conn|qtTypeIdent(o_data.srctyp) }} AS {{ conn|qtTypeIdent(o_data.trgtyp) }})
IS {{ data.description|qtLiteral }};
{% endif %}
IS {{ data.description|qtLiteral(conn) }};
{% endif %}

View File

@ -88,7 +88,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -110,7 +110,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "Exception('Mocked Exception Message')"
},
"expected_data": {
@ -153,7 +153,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -186,7 +186,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -249,7 +249,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -292,7 +292,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -315,7 +315,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "Exception('Mocked Exception Message')"
},
"expected_data": {
@ -402,7 +402,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -425,7 +425,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -552,7 +552,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -575,7 +575,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -639,7 +639,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -661,7 +661,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "Exception('Mocked Exception Message')"
},
"expected_data": {
@ -709,7 +709,7 @@
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 500,
"status_code": 200,
"error_msg": null,
"test_result_data": {}
}
@ -727,7 +727,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
@ -749,7 +749,7 @@
"test_data": {},
"mocking_required": false,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {

View File

@ -24,6 +24,7 @@ class CastsCreateTestCase(BaseTestGenerator):
def setUp(self):
""" This function will get data required to create cast."""
super().setUp()
super().runTest()
self.data = self.test_data

View File

@ -158,11 +158,11 @@ def create_cast(server, source_type, target_type):
server['port'],
server['sslmode'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
set_isolation_level(connection, 0)
pg_cursor = connection.cursor()
pg_cursor.execute("CREATE CAST (%s AS %s) WITHOUT"
" FUNCTION AS IMPLICIT" % (source_type, target_type))
connection.set_isolation_level(old_isolation_level)
set_isolation_level(connection, old_isolation_level)
connection.commit()
# Get 'oid' from newly created cast
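
The regression tests switch from the psycopg2-only connection.set_isolation_level() to a shared set_isolation_level() helper in test_utils, since psycopg 3 connections do not provide that method. A minimal sketch of such a wrapper (hypothetical, not the actual pgAdmin helper; it assumes level 0 maps to autocommit, as it does in psycopg2):

    def set_isolation_level(connection, level):
        if hasattr(connection, "set_isolation_level"):   # psycopg2 connection
            connection.set_isolation_level(level)
        else:                                            # psycopg 3 connection
            # level 0 meant autocommit in psycopg2; map it onto psycopg 3's flag
            connection.autocommit = (level == 0)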

View File

@ -428,7 +428,7 @@ class EventTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
sql = render_template(
"/".join([self.template_path, self._OID_SQL]),
data=data
data=data, conn=self.conn
)
status, etid = self.conn.execute_scalar(sql)
if not status:
@ -477,7 +477,7 @@ class EventTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
sql = render_template(
"/".join([self.template_path, self._OID_SQL]),
data=data
data=data, conn=self.conn
)
status, etid = self.conn.execute_scalar(sql)

View File

@ -1,5 +1,5 @@
{# The Sql below will provide oid for newly created event_trigger #}
{% if data %}
SELECT e.oid from pg_catalog.pg_event_trigger e
WHERE e.evtname = {{ data.name|qtLiteral }}
WHERE e.evtname = {{ data.name|qtLiteral(conn) }}
{% endif %}

View File

@ -13,7 +13,7 @@ ALTER EVENT TRIGGER {{ conn|qtIdent(data.name) }}
{% if data.comment %}
COMMENT ON EVENT TRIGGER {{ conn|qtIdent(data.name) }}
IS {{ data.comment|qtLiteral }};
IS {{ data.comment|qtLiteral(conn) }};
{% endif %}
{% if data.seclabels and data.seclabels|length > 0 %}

View File

@ -86,7 +86,7 @@
],
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while creating a event trigger')"
},
"expected_data": {
@ -114,7 +114,7 @@
],
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(True, True), (False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -138,7 +138,7 @@
],
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(True, True), (True, True), (False, 'Mocked Internal Server Error while getting oid of created event trigger')"
},
"expected_data": {
@ -188,7 +188,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a event trigger')"
},
"expected_data": {
@ -226,7 +226,7 @@
"event_trigger_list": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a event trigger')"
},
"expected_data": {
@ -252,7 +252,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while deleting a event trigger')"
},
"expected_data": {
@ -278,7 +278,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(True, True),(False, 'Mocked Internal Server Error while deleting a event trigger')"
},
"expected_data": {
@ -312,7 +312,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while fetching a event trigger')"
},
"expected_data": {
@ -394,7 +394,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching a event trigger nodes')"
},
"expected_data": {
@ -420,7 +420,7 @@
"node": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching a event trigger nodes')"
},
"expected_data": {
@ -446,7 +446,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a event trigger sql')"
},
"expected_data": {
@ -460,7 +460,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while fetching a DB created Event Trigger sql')"
},
"expected_data": {
@ -509,7 +509,7 @@
"event_trigger_functions": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching a event trigger functions')"
},
"expected_data": {

View File

@ -28,6 +28,7 @@ class EventTriggerAddTestCase(BaseTestGenerator):
event_trigger_utils.test_cases)
def setUp(self):
super().setUp()
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']

View File

@ -27,6 +27,7 @@ class EventTriggerDeleteTestCase(BaseTestGenerator):
event_trigger_utils.test_cases)
def setUp(self):
super().setUp()
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']

View File

@ -29,6 +29,7 @@ class EventTriggerFunctionsTestCase(BaseTestGenerator):
event_trigger_utils.test_cases)
def setUp(self):
super().setUp()
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']

View File

@ -27,6 +27,7 @@ class EventTriggerGetTestCase(BaseTestGenerator):
event_trigger_utils.test_cases)
def setUp(self):
super().setUp()
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']
@ -136,6 +137,7 @@ class EventTriggerGetNodesAndNodeTestCase(BaseTestGenerator):
event_trigger_utils.test_cases)
def setUp(self):
super().setUp()
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']

View File

@ -28,6 +28,7 @@ class EventTriggerPutTestCase(BaseTestGenerator):
event_trigger_utils.test_cases)
def setUp(self):
super().setUp()
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']

View File

@ -27,6 +27,7 @@ class EventTriggerGetSqlTestCase(BaseTestGenerator):
event_trigger_utils.test_cases)
def setUp(self):
super().setUp()
self.schema_data = parent_node_dict['schema'][-1]
self.server_id = self.schema_data['server_id']
self.db_id = self.schema_data['db_id']

View File

@ -13,7 +13,8 @@ import traceback
import os
import json
from regression.python_test_utils.test_utils import get_db_connection
from regression.python_test_utils.test_utils import get_db_connection,\
set_isolation_level
from regression.python_test_utils import test_utils as utils
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
@ -46,12 +47,12 @@ def create_event_trigger(server, db_name, schema_name, func_name,
server['port'],
server['sslmode'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
set_isolation_level(connection, 0)
pg_cursor = connection.cursor()
pg_cursor.execute('''CREATE EVENT TRIGGER "%s" ON DDL_COMMAND_END
EXECUTE PROCEDURE "%s"."%s"()''' % (trigger_name, schema_name,
func_name))
connection.set_isolation_level(old_isolation_level)
set_isolation_level(connection, old_isolation_level)
connection.commit()
# Get 'oid' from newly created event trigger
pg_cursor.execute(

View File

@ -158,7 +158,8 @@ class ExtensionView(PGChildNodeView, SchemaDiffObjectCompare):
Fetches all extensions properties and render into properties tab
"""
SQL = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]))
self._PROPERTIES_SQL]),
conn=self.conn)
status, res = self.conn.execute_dict(SQL)
if not status:
@ -175,7 +176,8 @@ class ExtensionView(PGChildNodeView, SchemaDiffObjectCompare):
"""
res = []
SQL = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]))
self._PROPERTIES_SQL]),
conn=self.conn)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
@ -201,7 +203,7 @@ class ExtensionView(PGChildNodeView, SchemaDiffObjectCompare):
"""
SQL = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]),
eid=eid)
eid=eid, conn=self.conn)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
@ -241,7 +243,8 @@ class ExtensionView(PGChildNodeView, SchemaDiffObjectCompare):
:return:
"""
SQL = render_template("/".join(
[self.template_path, self._PROPERTIES_SQL]), eid=eid)
[self.template_path, self._PROPERTIES_SQL]), eid=eid,
conn=self.conn)
status, res = self.conn.execute_dict(SQL)
if not status:
return False, internal_server_error(errormsg=res)
@ -424,7 +427,7 @@ class ExtensionView(PGChildNodeView, SchemaDiffObjectCompare):
if eid is not None:
SQL = render_template("/".join(
[self.template_path, self._PROPERTIES_SQL]
), eid=eid)
), eid=eid, conn=self.conn)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -483,7 +486,7 @@ class ExtensionView(PGChildNodeView, SchemaDiffObjectCompare):
"""
SQL = render_template("/".join(
[self.template_path, self._PROPERTIES_SQL]
), eid=eid)
), eid=eid, conn=self.conn)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -557,7 +560,8 @@ class ExtensionView(PGChildNodeView, SchemaDiffObjectCompare):
res = dict()
sql = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]))
self._PROPERTIES_SQL]),
conn=self.conn)
status, rset = self.conn.execute_2darray(sql)
if not status:
return internal_server_error(errormsg=rset)

View File

@ -11,7 +11,7 @@ FROM
{%- if eid %}
WHERE x.oid = {{eid}}::oid
{% elif ename %}
WHERE x.extname = {{ename|qtLiteral}}::text
WHERE x.extname = {{ename|qtLiteral(conn)}}::text
{% else %}
ORDER BY x.extname
{% endif %}

View File

@ -11,7 +11,8 @@
import sys
import traceback
from regression.python_test_utils.test_utils import get_db_connection
from regression.python_test_utils.test_utils import get_db_connection,\
set_isolation_level
def get_extension_data(schema_name):
@ -45,12 +46,12 @@ def create_extension(server, db_name, extension_name, schema_name):
server['port'],
server['sslmode'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
set_isolation_level(connection, 0)
pg_cursor = connection.cursor()
pg_cursor.execute(
'''CREATE EXTENSION "%s" SCHEMA "%s"''' % (extension_name,
schema_name))
connection.set_isolation_level(old_isolation_level)
set_isolation_level(connection, old_isolation_level)
connection.commit()
# Get 'oid' from newly created extension
pg_cursor.execute("SELECT oid FROM pg_catalog.pg_extension "

View File

@ -28,7 +28,7 @@ def get_extension_details(conn, ename, properties_sql=None):
[ExtensionView.EXT_TEMPLATE_PATH, 'properties.sql'])
status, rset = conn.execute_dict(
render_template(properties_sql, ename=ename)
render_template(properties_sql, ename=ename, conn=conn)
)
if status:

View File

@ -394,7 +394,7 @@ class ForeignDataWrapperView(PGChildNodeView, SchemaDiffObjectCompare):
)
sql = render_template("/".join([self.template_path, self._ACL_SQL]),
fid=fid
fid=fid, conn=self.conn
)
status, fdw_acl_res = self.conn.execute_dict(sql)
@ -815,7 +815,7 @@ class ForeignDataWrapperView(PGChildNodeView, SchemaDiffObjectCompare):
is_valid_options = True
sql = render_template("/".join([self.template_path, self._ACL_SQL]),
fid=fid)
fid=fid, conn=self.conn)
status, fdw_acl_res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=fdw_acl_res)
@ -969,7 +969,7 @@ class ForeignDataWrapperView(PGChildNodeView, SchemaDiffObjectCompare):
sql = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]),
schema_diff=True)
schema_diff=True, conn=self.conn)
status, rset = self.conn.execute_2darray(sql)
if not status:
return internal_server_error(errormsg=rset)

View File

@ -382,7 +382,7 @@ class ForeignServerView(PGChildNodeView, SchemaDiffObjectCompare):
)
sql = render_template("/".join([self.template_path, self._ACL_SQL]),
fsid=fsid
fsid=fsid, conn=self.conn
)
status, fs_rv_acl_res = self.conn.execute_dict(sql)
if not status:
@ -930,7 +930,7 @@ class ForeignServerView(PGChildNodeView, SchemaDiffObjectCompare):
sql = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]),
schema_diff=True)
schema_diff=True, conn=self.conn)
status, rset = self.conn.execute_2darray(sql)
if not status:
return internal_server_error(errormsg=rset)

View File

@ -11,7 +11,7 @@ FROM
LEFT OUTER JOIN pg_catalog.pg_shdescription descr ON (
fsrv.oid=descr.objoid AND descr.classoid='pg_foreign_server'::regclass)
{% if fsid %}
WHERE fsrv.oid = {{ fsid|qtLiteral }}::OID
WHERE fsrv.oid = {{ fsid|qtLiteral(conn) }}::OID
{% endif %}
) acl,
pg_catalog.aclexplode(srvacl) d

View File

@ -3,15 +3,15 @@
{% if data.name %}
CREATE SERVER {{ conn|qtIdent(data.name) }}{% if data.fsrvtype %}
TYPE {{ data.fsrvtype|qtLiteral }}{% endif %}{% if data.fsrvversion %}
TYPE {{ data.fsrvtype|qtLiteral(conn) }}{% endif %}{% if data.fsrvversion %}
VERSION {{ data.fsrvversion|qtLiteral }}{%-endif %}{% if fdwdata %}
VERSION {{ data.fsrvversion|qtLiteral(conn) }}{%-endif %}{% if fdwdata %}
FOREIGN DATA WRAPPER {{ conn|qtIdent(fdwdata.name) }}{% endif %}{% if data.fsrvoptions %}
{% if is_valid_options %}
OPTIONS ({% for variable in data.fsrvoptions %}{% if loop.index != 1 %}, {% endif %}
{{ conn|qtIdent(variable.fsrvoption) }} {{ variable.fsrvvalue|qtLiteral }}{% endfor %}){% endif %}{% endif %};
{{ conn|qtIdent(variable.fsrvoption) }} {{ variable.fsrvvalue|qtLiteral(conn) }}{% endfor %}){% endif %}{% endif %};
{# ============= Set the owner for foreign server ============= #}
{% if data.fsrvowner %}
@ -22,7 +22,7 @@ ALTER SERVER {{ conn|qtIdent(data.name) }}
{% if data.description %}
COMMENT ON SERVER {{ conn|qtIdent(data.name) }}
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif %}
{# ============= Set the ACL for foreign server ============= #}

View File

@ -12,9 +12,9 @@ FROM pg_catalog.pg_foreign_server srv
LEFT OUTER JOIN pg_catalog.pg_foreign_data_wrapper fdw on fdw.oid=srvfdw
LEFT OUTER JOIN pg_catalog.pg_description des ON (des.objoid=srv.oid AND des.objsubid=0 AND des.classoid='pg_foreign_server'::regclass)
{% if data and fdwdata %}
WHERE fdw.fdwname = {{ fdwdata.name|qtLiteral }}::text and srvname = {{ data.name|qtLiteral }}::text
WHERE fdw.fdwname = {{ fdwdata.name|qtLiteral(conn) }}::text and srvname = {{ data.name|qtLiteral(conn) }}::text
{% elif fdwdata %}
WHERE fdw.fdwname = {{fdwdata.name|qtLiteral}}::text
WHERE fdw.fdwname = {{fdwdata.name|qtLiteral(conn)}}::text
{% endif %}
{% if fid %}
WHERE srvfdw={{fid}}::oid

View File

@ -20,14 +20,14 @@ DROP SERVER {{ conn|qtIdent(o_data.name) }};
CREATE SERVER {{ conn|qtIdent(o_data.name) }}{% if fsrvtype %}
TYPE {{ fsrvtype|qtLiteral }}{% endif %}{% if fsrvversion %}
TYPE {{ fsrvtype|qtLiteral(conn) }}{% endif %}{% if fsrvversion %}
VERSION {{ fsrvversion|qtLiteral }}{%-endif %}{% if fdwname %}
VERSION {{ fsrvversion|qtLiteral(conn) }}{%-endif %}{% if fdwname %}
FOREIGN DATA WRAPPER {{ conn|qtIdent(fdwname) }}{% endif %}{% if o_data.fsrvoptions %}
OPTIONS ({% for variable in o_data.fsrvoptions %}{% if loop.index != 1 %}, {% endif %}
{{ conn|qtIdent(variable.fsrvoption) }} {{ variable.fsrvvalue|qtLiteral }}{% endfor %}){% endif %};
{{ conn|qtIdent(variable.fsrvoption) }} {{ variable.fsrvvalue|qtLiteral(conn) }}{% endfor %}){% endif %};
ALTER SERVER {{ conn|qtIdent(o_data.name) }}
OWNER TO {{ conn|qtIdent(o_data.fsrvowner) }};
@ -49,13 +49,13 @@ ALTER SERVER {{ conn|qtIdent(data.name) }}
{# ============= Update foreign server version ============= #}
{% if data.fsrvversion is defined and data.fsrvversion is not none and data.fsrvversion != o_data.fsrvversion %}
ALTER SERVER {{ conn|qtIdent(data.name) }}
VERSION {{ data.fsrvversion|qtLiteral }};
VERSION {{ data.fsrvversion|qtLiteral(conn) }};
{% endif %}
{# ============= Update foreign server comments ============= #}
{% if data.description is defined and data.description != o_data.description %}
COMMENT ON SERVER {{ conn|qtIdent(data.name) }}
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif %}
{# ============= Update foreign server options and values ============= #}
@ -70,7 +70,7 @@ DROP {{ conn|qtIdent(variable.fsrvoption) }}{% endfor %}
{% if is_valid_added_options %}
ALTER SERVER {{ conn|qtIdent(data.name) }}
OPTIONS ({% for variable in data.fsrvoptions.added %}{% if loop.index != 1 %}, {% endif %}
ADD {{ conn|qtIdent(variable.fsrvoption) }} {{ variable.fsrvvalue|qtLiteral }}{% endfor %}
ADD {{ conn|qtIdent(variable.fsrvoption) }} {{ variable.fsrvvalue|qtLiteral(conn) }}{% endfor %}
);
{% endif %}
@ -79,7 +79,7 @@ ADD {{ conn|qtIdent(variable.fsrvoption) }} {{ variable.fsrvvalue|qtLiteral }}{%
{% if is_valid_changed_options %}
ALTER SERVER {{ conn|qtIdent(data.name) }}
OPTIONS ({% for variable in data.fsrvoptions.changed %}{% if loop.index != 1 %}, {% endif %}
SET {{ conn|qtIdent(variable.fsrvoption) }} {{ variable.fsrvvalue|qtLiteral }}{% endfor %}
SET {{ conn|qtIdent(variable.fsrvoption) }} {{ variable.fsrvvalue|qtLiteral(conn) }}{% endfor %}
);
{% endif %}

View File

@ -43,7 +43,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -58,7 +58,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while creating a foreign server')"
},
"expected_data": {
@ -84,7 +84,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -156,7 +156,7 @@
"internal_server_error": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching foreign server nodes')"
},
"expected_data": {
@ -182,7 +182,7 @@
"internal_server_error": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching foreign server nodes')"
},
"expected_data": {
@ -216,7 +216,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "[(False, 'Mocked Internal Server Error while fetching a foreign server')]"
},
"expected_data": {
@ -242,7 +242,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a foreign server')"
},
"expected_data": {
@ -279,7 +279,7 @@
"foreign_server_list": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a foreign server')"
},
"expected_data": {
@ -305,7 +305,7 @@
"internal_server_error": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a foreign server')"
},
"expected_data": {

View File

@ -13,7 +13,8 @@ import sys
import uuid
import json
from regression.python_test_utils.test_utils import get_db_connection
from regression.python_test_utils.test_utils import get_db_connection,\
set_isolation_level
file_name = os.path.basename(__file__)
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
@ -74,14 +75,14 @@ def create_fsrv(server, db_name, fsrv_name, fdw_name):
server['port'],
server['sslmode'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
set_isolation_level(connection, 0)
pg_cursor = connection.cursor()
pg_cursor.execute("CREATE SERVER {0} FOREIGN DATA WRAPPER {1} OPTIONS "
"(host '{2}', dbname '{3}', port '{4}')".format
(fsrv_name, fdw_name, server['host'], db_name,
server['port']))
connection.set_isolation_level(old_isolation_level)
set_isolation_level(connection, old_isolation_level)
connection.commit()
# Get 'oid' from newly created foreign server

View File

@ -3,4 +3,4 @@
CREATE USER MAPPING FOR {% if data.name == "CURRENT_USER" or data.name == "PUBLIC" %}{{ data.name }}{% else %}{{ conn|qtIdent(data.name) }}{% endif %} SERVER {{ conn|qtIdent(fdwdata.name) }}{%endif%}{% if data.umoptions %}{% if is_valid_options %}
OPTIONS ({% for variable in data.umoptions %}{% if loop.index != 1 %}, {% endif %}
{{ conn|qtIdent(variable.umoption) }} {{ variable.umvalue|qtLiteral }}{% endfor %}){% endif %}{% endif %};
{{ conn|qtIdent(variable.umoption) }} {{ variable.umvalue|qtLiteral(conn) }}{% endfor %}){% endif %}{% endif %};

View File

@ -10,7 +10,7 @@ DROP {{ conn|qtIdent(variable.umoption) }}{% endfor %}
{% if is_valid_added_options %}
ALTER USER MAPPING FOR {{ conn|qtIdent(o_data.name) }} SERVER {{ conn|qtIdent(fdwdata.name) }}
OPTIONS ({% for variable in data.umoptions.added %}{% if loop.index != 1 %}, {% endif %}
ADD {{ conn|qtIdent(variable.umoption) }} {{ variable.umvalue|qtLiteral }}{% endfor %}
ADD {{ conn|qtIdent(variable.umoption) }} {{ variable.umvalue|qtLiteral(conn) }}{% endfor %}
);
{% endif %}
@ -19,8 +19,8 @@ ADD {{ conn|qtIdent(variable.umoption) }} {{ variable.umvalue|qtLiteral }}{% end
{% if is_valid_changed_options %}
ALTER USER MAPPING FOR {{ conn|qtIdent(o_data.name) }} SERVER {{ conn|qtIdent(fdwdata.name) }}
OPTIONS ({% for variable in data.umoptions.changed %}{% if loop.index != 1 %}, {% endif %}
SET {{ conn|qtIdent(variable.umoption) }} {{ variable.umvalue|qtLiteral }}{% endfor %}
SET {{ conn|qtIdent(variable.umoption) }} {{ variable.umvalue|qtLiteral(conn) }}{% endfor %}
);
{% endif %}
{% endif %}
{% endif %}

View File

@ -19,7 +19,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while creating a user mapping')"
},
"expected_data": {
@ -59,7 +59,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -85,7 +85,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -124,7 +124,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while deleting a user mapping')"
},
"expected_data": {
@ -184,7 +184,7 @@
"internal_server_error": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching user mapping nodes')"
},
"expected_data": {
@ -210,7 +210,7 @@
"internal_server_error": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching user mapping nodes')"
},
"expected_data": {
@ -260,7 +260,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "[(False, 'Mocked Internal Server Error while fetching a user mapping')]"
},
"expected_data": {
@ -286,7 +286,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a user mapping')"
},
"expected_data": {
@ -323,7 +323,7 @@
"um_list": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a user mapping')"
},
"expected_data": {
@ -349,7 +349,7 @@
"internal_server_error": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a user mapping')"
},
"expected_data": {

View File

@ -13,7 +13,8 @@ import sys
import traceback
import json
from regression.python_test_utils.test_utils import get_db_connection
from regression.python_test_utils.test_utils import get_db_connection,\
set_isolation_level
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
with open(CURRENT_PATH + "/user_mapping_test_data.json") as data_file:
@ -58,7 +59,7 @@ def create_user_mapping(server, db_name, fsrv_name):
server['port'],
server['sslmode'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
set_isolation_level(connection, 0)
pg_cursor = connection.cursor()
query = "CREATE USER MAPPING FOR %s SERVER %s OPTIONS" \
" (user '%s', password '%s')" % (server['username'],
@ -67,7 +68,7 @@ def create_user_mapping(server, db_name, fsrv_name):
server['db_password']
)
pg_cursor.execute(query)
connection.set_isolation_level(old_isolation_level)
set_isolation_level(connection, old_isolation_level)
connection.commit()
# Get 'oid' from newly created user mapping
pg_cursor.execute(

View File

@ -11,7 +11,7 @@ FROM
LEFT OUTER JOIN pg_catalog.pg_shdescription descr ON (
fdw.oid=descr.objoid AND descr.classoid='pg_foreign_data_wrapper'::regclass)
{% if fid %}
WHERE fdw.oid = {{ fid|qtLiteral }}::OID
WHERE fdw.oid = {{ fid|qtLiteral(conn) }}::OID
{% endif %}
) acl,
pg_catalog.aclexplode(fdwacl) d

View File

@ -9,7 +9,7 @@ CREATE FOREIGN DATA WRAPPER {{ conn|qtIdent(data.name) }}{% if data.fdwvalue %}
{% if is_valid_options %}
OPTIONS ({% for variable in data.fdwoptions %}{% if loop.index != 1 %}, {% endif %}
{{ conn|qtIdent(variable.fdwoption) }} {{ variable.fdwvalue|qtLiteral }}{% endfor %}){% endif %}{% endif %};
{{ conn|qtIdent(variable.fdwoption) }} {{ variable.fdwvalue|qtLiteral(conn) }}{% endfor %}){% endif %}{% endif %};
{# ============= Set the owner for foreign data wrapper ============= #}
{% if data.fdwowner %}
@ -20,7 +20,7 @@ ALTER FOREIGN DATA WRAPPER {{ conn|qtIdent(data.name) }}
{# ============= Comment on of foreign data wrapper object ============= #}
{% if data.description %}
COMMENT ON FOREIGN DATA WRAPPER {{ conn|qtIdent(data.name) }}
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif %}
{# ============= Create ACL for foreign data wrapper ============= #}

View File

@ -21,7 +21,7 @@ FROM pg_catalog.pg_foreign_data_wrapper fdw
WHERE fdw.oid={{fid}}::oid
{% endif %}
{% if fname %}
WHERE fdw.fdwname={{ fname|qtLiteral }}::text
WHERE fdw.fdwname={{ fname|qtLiteral(conn) }}::text
{% endif %}
{% if schema_diff %}
WHERE CASE WHEN (SELECT COUNT(*) FROM pg_catalog.pg_depend

View File

@ -37,7 +37,7 @@ ALTER FOREIGN DATA WRAPPER {{ conn|qtIdent(data.name) }}
{# ============= Update foreign data wrapper comments ============= #}
{% if data.description is defined and data.description != o_data.description %}
COMMENT ON FOREIGN DATA WRAPPER {{ conn|qtIdent(data.name) }}
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif %}
{# ============= Update foreign data wrapper options and values ============= #}
@ -52,7 +52,7 @@ DROP {{ conn|qtIdent(variable.fdwoption) }}{% endfor %}
{% if is_valid_added_options %}
ALTER FOREIGN DATA WRAPPER {{ conn|qtIdent(data.name) }}
OPTIONS ({% for variable in data.fdwoptions.added %}{% if loop.index != 1 %}, {% endif %}
ADD {{ conn|qtIdent(variable.fdwoption) }} {{ variable.fdwvalue|qtLiteral }}{% endfor %}
ADD {{ conn|qtIdent(variable.fdwoption) }} {{ variable.fdwvalue|qtLiteral(conn) }}{% endfor %}
);
{% endif %}
@ -61,7 +61,7 @@ ADD {{ conn|qtIdent(variable.fdwoption) }} {{ variable.fdwvalue|qtLiteral }}{% e
{% if is_valid_changed_options %}
ALTER FOREIGN DATA WRAPPER {{ conn|qtIdent(data.name) }}
OPTIONS ({% for variable in data.fdwoptions.changed %}{% if loop.index != 1 %}, {% endif %}
SET {{ conn|qtIdent(variable.fdwoption) }} {{ variable.fdwvalue|qtLiteral }}{% endfor %}
SET {{ conn|qtIdent(variable.fdwoption) }} {{ variable.fdwvalue|qtLiteral(conn) }}{% endfor %}
);
{% endif %}

View File

@ -19,7 +19,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while creating a foreign data wrapper')"
},
"expected_data": {
@ -34,7 +34,7 @@
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(True, True), (False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -94,7 +94,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while deleting a fdw')"
},
"expected_data": {
@ -108,7 +108,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(True, True), (False, 'Mocked Internal Server Error while deleting a fdw')"
},
"expected_data": {
@ -158,7 +158,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching fdw nodes')"
},
"expected_data": {
@ -184,7 +184,7 @@
"node": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching fdw nodes')"
},
"expected_data": {
@ -222,7 +222,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching handlers')"
},
"expected_data": {
@ -248,7 +248,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching handlers')"
},
"expected_data": {
@ -282,7 +282,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while updating fdw')"
},
"expected_data": {
@ -308,7 +308,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a fdw')"
},
"expected_data": {
@ -346,7 +346,7 @@
"fdw_list": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a fdw')"
},
"expected_data": {
@ -372,7 +372,7 @@
"internal_server_error": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {

View File

@ -14,7 +14,8 @@ import uuid
import json
import os
from regression.python_test_utils.test_utils import get_db_connection
from regression.python_test_utils.test_utils import get_db_connection,\
set_isolation_level
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
with open(CURRENT_PATH + "/fdw_test_data.json") as data_file:
@ -66,11 +67,11 @@ def create_fdw(server, db_name, fdw_name):
server['port'],
server['sslmode'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
set_isolation_level(connection, 0)
pg_cursor = connection.cursor()
pg_cursor.execute('''CREATE FOREIGN DATA WRAPPER "%s"
OPTIONS (op1 '5')''' % fdw_name)
connection.set_isolation_level(old_isolation_level)
set_isolation_level(connection, old_isolation_level)
connection.commit()
# Get 'oid' from newly created foreign data wrapper
pg_cursor.execute(

View File

@ -260,7 +260,8 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
did: Database ID
"""
sql = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]))
self._PROPERTIES_SQL]),
conn=self.conn)
status, res = self.conn.execute_dict(sql)
if not status:
@ -283,7 +284,8 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
"""
res = []
sql = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]))
self._PROPERTIES_SQL]),
conn=self.conn)
status, result = self.conn.execute_2darray(sql)
if not status:
return internal_server_error(errormsg=result)
@ -315,7 +317,8 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
"""
sql = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]),
lid=lid)
lid=lid,
conn=self.conn)
status, result = self.conn.execute_2darray(sql)
if not status:
return internal_server_error(errormsg=result)
@ -362,7 +365,7 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
"""
sql = render_template(
"/".join([self.template_path, self._PROPERTIES_SQL]),
lid=lid
lid=lid, conn=self.conn
)
status, res = self.conn.execute_dict(sql)
@ -378,7 +381,7 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
sql = render_template(
"/".join([self.template_path, self._ACL_SQL]),
lid=lid
lid=lid, conn=self.conn
)
status, result = self.conn.execute_dict(sql)
if not status:
@ -631,7 +634,8 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
if lid is not None:
sql = render_template(
"/".join([self.template_path, self._PROPERTIES_SQL]), lid=lid
"/".join([self.template_path, self._PROPERTIES_SQL]), lid=lid,
conn=self.conn
)
status, res = self.conn.execute_dict(sql)
if not status:
@ -722,7 +726,7 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
"""
sql = render_template(
"/".join([self.template_path, self._PROPERTIES_SQL]),
lid=lid
lid=lid, conn=self.conn
)
status, res = self.conn.execute_dict(sql)
if not status:
@ -736,7 +740,7 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
sql = render_template(
"/".join([self.template_path, self._ACL_SQL]),
lid=lid
lid=lid, conn=self.conn
)
status, result = self.conn.execute_dict(sql)
if not status:
@ -826,7 +830,8 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
res = dict()
sql = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]),
schema_diff=True)
schema_diff=True,
conn=self.conn)
status, rset = self.conn.execute_2darray(sql)
if not status:
return internal_server_error(errormsg=rset)

View File

@ -10,7 +10,7 @@ FROM
FROM
(SELECT lanacl FROM pg_catalog.pg_language lan
LEFT OUTER JOIN pg_catalog.pg_shdescription descr ON (lan.oid=descr.objoid AND descr.classoid='pg_language'::regclass)
WHERE lan.oid = {{ lid|qtLiteral }}::OID
WHERE lan.oid = {{ lid|qtLiteral(conn) }}::OID
) acl,
pg_catalog.aclexplode(lanacl) d
) d

View File

@ -23,7 +23,7 @@ ALTER LANGUAGE {{ conn|qtIdent(data.name) }}
{# ============= Comment on of language object ============= #}
{% if data.description %}
COMMENT ON LANGUAGE {{ conn|qtIdent(data.name) }}
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif %}
{# ============= Create ACL for language ============= #}
{% if data.lanacl %}

View File

@ -18,7 +18,7 @@ WHERE lanispl IS TRUE
lan.oid={{lid}}::oid
{% endif %}
{% if lanname %} AND
lanname={{ lanname|qtLiteral }}::text
lanname={{ lanname|qtLiteral(conn) }}::text
{% endif %}
{% if schema_diff %}
AND CASE WHEN (SELECT COUNT(*) FROM pg_catalog.pg_depend

View File

@ -24,7 +24,7 @@ ALTER LANGUAGE {{ conn|qtIdent(data.name) }}
{% if data.description %}
COMMENT ON LANGUAGE {{ conn|qtIdent(data.name) }}
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif %}
{# ============= PRIVILEGES on LANGUAGE ============= #}
{% if data.lanacl and data.lanacl|length > 0 %}

View File

@ -65,7 +65,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
}
},
@ -88,7 +88,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(True, True), (False, 'Mocked Internal Server Error')"
}
}
@ -127,7 +127,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
}
},
@ -142,7 +142,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "PLACE_HOLDER"
}
},
@ -156,7 +156,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
}
},
@ -171,7 +171,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
}
}
@ -197,7 +197,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error')"
}
},
@ -223,7 +223,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error')"
}
}
@ -270,7 +270,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "PLACE_HOLDER"
},
"expected_data": {
@ -312,7 +312,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
}
}
@ -346,7 +346,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
}
},
@ -362,7 +362,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
}
}
@ -409,7 +409,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error')"
}
},
@ -424,7 +424,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(True, True), (False, 'Mocked Internal Server Error')"
}
}
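The function_name changes in these fixtures are purely mechanical: the mocked connection class now lives under pgadmin.utils.driver.psycopg3 instead of pgadmin.utils.driver.psycopg2, and the tests patch it exactly as before. Illustratively (this snippet only imports inside a pgAdmin test environment):

    from unittest.mock import patch

    # Patch the psycopg3-based connection method with the canned failure
    # from mock_data, then exercise the endpoint under test.
    target = 'pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict'
    with patch(target, return_value=(False, 'Mocked Internal Server Error')):
        ...  # issue the request under test here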

View File

@ -22,6 +22,7 @@ class LanguagesDeleteTestCase(BaseTestGenerator):
'language_delete', language_utils.test_cases)
def setUp(self):
super().setUp()
self.server_data = parent_node_dict["database"][-1]
self.server_id = self.server_data["server_id"]
self.db_id = self.server_data['db_id']
@ -53,10 +54,6 @@ class LanguagesDeleteTestCase(BaseTestGenerator):
expected_status_code = self.expected_data['status_code']
elif self.error_in_deleting_language:
# with patch('pgadmin.utils.driver.psycopg2.connection.Connection'
# '.execute_scalar',
# side_effect=[(True, True), (
# False, self.expected_data["message"])]):
with patch(self.mock_data["function_name"],
side_effect=[eval(self.mock_data["return_value"])]):
response = self.delete_language()

View File

@ -23,6 +23,7 @@ class LanguagesGetTestCase(BaseTestGenerator):
language_utils.test_cases)
def setUp(self):
super().setUp()
self.server_data = parent_node_dict["database"][-1]
self.server_id = self.server_data["server_id"]
self.db_id = self.server_data['db_id']
@ -110,6 +111,7 @@ class LanguagesGetNodesTestCase(BaseTestGenerator):
language_utils.test_cases)
def setUp(self):
super().setUp()
self.server_data = parent_node_dict["database"][-1]
self.server_id = self.server_data["server_id"]
self.db_id = self.server_data['db_id']

View File

@ -23,6 +23,7 @@ class LanguagesGetFunctionAndTemplateTestCase(BaseTestGenerator):
'get_language_function_and_template', language_utils.test_cases)
def setUp(self):
super().setUp()
self.server_data = parent_node_dict["database"][-1]
self.server_id = self.server_data["server_id"]
self.db_id = self.server_data['db_id']

View File

@ -24,6 +24,7 @@ class LanguagesPutTestCase(BaseTestGenerator):
language_utils.test_cases)
def setUp(self):
super().setUp()
self.server_data = parent_node_dict["database"][-1]
self.server_id = self.server_data["server_id"]
self.db_id = self.server_data['db_id']

View File

@ -23,6 +23,7 @@ class LanguagesGetSql(BaseTestGenerator):
language_utils.test_cases)
def setUp(self):
super().setUp()
self.server_data = parent_node_dict["database"][-1]
self.server_id = self.server_data["server_id"]
self.db_id = self.server_data['db_id']

View File

@ -81,7 +81,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error ')"
},
"expected_data": {
@ -104,7 +104,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(True, True)(False, 'Mocked Internal Server Error ')"
},
"expected_data": {
@ -129,7 +129,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -154,7 +154,7 @@
"database_nodes": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -197,7 +197,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -234,7 +234,7 @@
"database_nodes": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -259,7 +259,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -323,7 +323,7 @@
"id": "PLACE_HOLDER"
},
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -375,7 +375,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -388,7 +388,7 @@
"is_positive_test": true,
"mocking_required": false,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {

View File

@ -27,6 +27,7 @@ class PublicationsAddTestCase(BaseTestGenerator):
publication_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]

View File

@ -25,6 +25,7 @@ class PublicationDeleteTestCase(BaseTestGenerator):
publication_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]

View File

@ -28,6 +28,7 @@ class PublicationGetTestCase(BaseTestGenerator):
publication_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]

View File

@ -29,6 +29,7 @@ class PublicationUpdateTestCase(BaseTestGenerator):
publication_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]

View File

@ -28,6 +28,7 @@ class PublicationGetTestCase(BaseTestGenerator):
publication_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]

View File

@ -59,12 +59,12 @@ def create_publication(server, db_name, publication_name):
server['port'],
server['sslmode'])
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
utils.set_isolation_level(connection, 0)
pg_cursor = connection.cursor()
query = "CREATE publication %s FOR ALL TABLES" % \
(publication_name)
pg_cursor.execute(query)
connection.set_isolation_level(old_isolation_level)
utils.set_isolation_level(connection, old_isolation_level)
connection.commit()
# Get role oid of newly added publication
pg_cursor.execute("select oid from pg_catalog.pg_publication pub "
@ -139,11 +139,11 @@ def delete_publication(server, db_name, publication_name):
publication_count = pg_cursor.fetchone()
if publication_count:
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
utils.set_isolation_level(connection, 0)
pg_cursor = connection.cursor()
query = "DROP publication %s" % publication_name
pg_cursor.execute(query)
connection.set_isolation_level(old_isolation_level)
utils.set_isolation_level(connection, old_isolation_level)
connection.commit()
connection.close()
except Exception:
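The utils.set_isolation_level() calls above replace psycopg2's connection.set_isolation_level(), which psycopg 3 no longer provides; psycopg 3 exposes autocommit and isolation_level attributes instead. A minimal sketch of such a helper, assuming level 0 means autocommit exactly as in psycopg2, might be:

    def set_isolation_level(connection, level):
        # psycopg2 connections still carry the old method.
        if hasattr(connection, 'set_isolation_level'):
            connection.set_isolation_level(level)
        # psycopg 3: level 0 maps to autocommit mode.
        elif level == 0:
            connection.autocommit = True
        else:
            connection.autocommit = False
            connection.isolation_level = level

This only illustrates the shape of the change; the tests call the project's own utils.set_isolation_level(), which may differ in detail.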

View File

@ -412,7 +412,8 @@ class SchemaView(PGChildNodeView):
SQL = render_template(
"/".join([self.template_path, 'sql/acl.sql']),
_=gettext,
scid=scid
scid=scid,
conn=self.conn
)
status, acl = self.conn.execute_dict(SQL)
if not status:
@ -518,7 +519,8 @@ class SchemaView(PGChildNodeView):
show_sysobj=show_system_objects,
_=gettext,
scid=scid,
schema_restrictions=param
schema_restrictions=param,
conn=self.conn
)
status, rset = self.conn.execute_2darray(SQL)
@ -578,7 +580,8 @@ class SchemaView(PGChildNodeView):
"/".join([self.template_path, self._SQL_PREFIX + self._NODES_SQL]),
show_sysobj=self.blueprint.show_system_objects,
_=gettext,
scid=scid
scid=scid,
conn=self.conn
)
status, rset = self.conn.execute_2darray(SQL)
@ -663,6 +666,17 @@ It may have been removed by another user.
request.data
)
for k, v in data.items():
try:
# comments should be taken as-is; if the user enters a JSON-like
# comment, json.loads would otherwise parse it, which should not happen
if k in ('comment',):
data[k] = v
else:
data[k] = json.loads(v, encoding='utf-8')
except (ValueError, TypeError, KeyError):
data[k] = v
required_args = {
'name': 'Name'
}
@ -695,7 +709,7 @@ It may have been removed by another user.
# below sql will give the same
SQL = render_template(
"/".join([self.template_path, 'sql/oid.sql']),
schema=data['name'], _=gettext
schema=data['name'], _=gettext, conn=self.conn
)
status, scid = self.conn.execute_scalar(SQL)
@ -991,7 +1005,7 @@ It may have been removed by another user.
SQL = render_template(
"/".join([self.template_path, 'sql/is_catalog.sql']),
scid=kwargs['scid'], _=gettext
scid=kwargs['scid'], _=gettext, conn=self.conn
)
status, res = self.conn.execute_dict(SQL)
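The special case for the 'comment' key added to SchemaView above keeps the user's text verbatim: a comment that happens to look like JSON would otherwise be converted by json.loads into a non-string value. A standalone illustration:

    import json

    raw_comment = '{"note": "nightly schema"}'
    # Without the special case the comment is silently turned into a dict...
    print(json.loads(raw_comment))   # {'note': 'nightly schema'}
    # ...whereas the intent is to store exactly the text the user typed.
    print(raw_comment)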

View File

@ -486,7 +486,8 @@ class CollationView(PGChildNodeView, SchemaDiffObjectCompare):
# We need oid to add object in tree at browser
SQL = render_template(
"/".join([self.template_path, self._OID_SQL]), data=data
"/".join([self.template_path, self._OID_SQL]), data=data,
conn=self.conn
)
status, coid = self.conn.execute_scalar(SQL)
if not status:
@ -494,7 +495,8 @@ class CollationView(PGChildNodeView, SchemaDiffObjectCompare):
# Get updated schema oid
SQL = render_template(
"/".join([self.template_path, self._OID_SQL]), coid=coid
"/".join([self.template_path, self._OID_SQL]), coid=coid,
conn=self.conn
)
status, new_scid = self.conn.execute_scalar(SQL)

View File

@ -2,11 +2,11 @@
CREATE COLLATION{% if add_not_exists_clause %} IF NOT EXISTS{% endif %} {{ conn|qtIdent(data.schema, data.name) }}
{# if user has provided lc_collate & lc_type #}
{% if data.lc_collate and data.lc_type %}
(LC_COLLATE = {{ data.lc_collate|qtLiteral }}, LC_CTYPE = {{ data.lc_type|qtLiteral }});
(LC_COLLATE = {{ data.lc_collate|qtLiteral(conn) }}, LC_CTYPE = {{ data.lc_type|qtLiteral(conn) }});
{% endif %}
{# if user has provided locale only #}
{% if data.locale %}
(LOCALE = {{ data.locale|qtLiteral }});
(LOCALE = {{ data.locale|qtLiteral(conn) }});
{% endif %}
{# if user has chosen to copy from existing collation #}
{% if data.copy_collation %}
@ -20,6 +20,6 @@ ALTER COLLATION {{ conn|qtIdent(data.schema, data.name) }}
{% if data.description %}
COMMENT ON COLLATION {{ conn|qtIdent(data.schema, data.name) }}
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif %}
{% endif %}

View File

@ -3,8 +3,8 @@
SELECT c.oid
FROM pg_catalog.pg_collation c, pg_catalog.pg_namespace n
WHERE c.collnamespace=n.oid AND
n.nspname = {{ data.schema|qtLiteral }} AND
c.collname = {{ data.name|qtLiteral }}
n.nspname = {{ data.schema|qtLiteral(conn) }} AND
c.collname = {{ data.name|qtLiteral(conn) }}
{% elif coid %}
SELECT
c.collnamespace as scid

View File

@ -8,10 +8,10 @@ DROP COLLATION {{ conn|qtIdent(o_data.schema, o_data.name) }};
CREATE COLLATION {{ conn|qtIdent(o_data.schema, o_data.name) }}
{% if data.lc_collate and data.lc_type %}
(LC_COLLATE = {{ data.lc_collate|qtLiteral }}, LC_CTYPE = {{ data.lc_type|qtLiteral }});
(LC_COLLATE = {{ data.lc_collate|qtLiteral(conn) }}, LC_CTYPE = {{ data.lc_type|qtLiteral(conn) }});
{% endif %}
{% if data.locale %}
(LOCALE = {{ data.locale|qtLiteral }});
(LOCALE = {{ data.locale|qtLiteral(conn) }});
{% endif %}
{% if data.copy_collation %}
FROM {{ data.copy_collation }};
@ -28,7 +28,7 @@ ALTER COLLATION {{ conn|qtIdent(o_data.schema, o_data.name) }}
{# Change object's comment #}
{% if data.description is defined and data.description != o_data.description %}
COMMENT ON COLLATION {{ conn|qtIdent(o_data.schema, o_data.name) }}
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif %}
{# Change object name #}

View File

@ -65,7 +65,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(True, True), (False, 'Mocked Internal Server Error while getting oid of created collation')"
},
"expected_data": {
@ -85,7 +85,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -111,7 +111,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a collation')"
},
"expected_data": {
@ -149,7 +149,7 @@
"collation_list": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a collation')"
},
"expected_data": {
@ -175,7 +175,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while deleting a collation')"
},
"expected_data": {
@ -201,7 +201,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while deleting a collation')"
},
"expected_data": {
@ -235,7 +235,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while fetching a collation')"
},
"expected_data": {
@ -253,7 +253,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching a collation')"
},
"expected_data": {
@ -301,7 +301,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching collation nodes')"
},
"expected_data": {
@ -327,7 +327,7 @@
"node": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching collation nodes')"
},
"expected_data": {
@ -352,7 +352,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching collations')"
},
"expected_data": {
@ -390,7 +390,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a collation sql')"
},
"expected_data": {

View File

@ -597,7 +597,8 @@ AND relkind != 'c'))"""
SQL = render_template("/".join([self.template_path,
self._OID_SQL]),
basensp=data['basensp'],
name=data['name'])
name=data['name'],
conn=self.conn)
status, doid = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=doid)
@ -605,7 +606,8 @@ AND relkind != 'c'))"""
# Get updated schema oid
SQL = render_template("/".join([self.template_path,
self._OID_SQL]),
doid=doid)
doid=doid,
conn=self.conn)
status, scid = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=scid)
@ -705,7 +707,8 @@ AND relkind != 'c'))"""
# Get Schema Id
SQL = render_template("/".join([self.template_path,
self._OID_SQL]),
doid=doid)
doid=doid,
conn=self.conn)
status, scid = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=scid)
@ -835,7 +838,7 @@ AND relkind != 'c'))"""
SQL = render_template(
"/".join([self.template_path, self._UPDATE_SQL]),
data=data, o_data=old_data)
data=data, o_data=old_data, conn=self.conn)
return SQL, data
def get_sql(self, gid, sid, data, scid, doid=None, is_schema_diff=False):

View File

@ -444,7 +444,8 @@ class DomainConstraintView(PGChildNodeView):
# Get the recently added constraints oid
SQL = render_template("/".join([self.template_path,
self._OID_SQL]),
doid=doid, name=data['name'])
doid=doid, name=data['name'],
conn=self.conn)
status, coid = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=coid)

View File

@ -4,4 +4,4 @@ FROM
pg_catalog.pg_constraint
WHERE
contypid = {{doid}}::oid
AND conname={{ name|qtLiteral }};
AND conname={{ name|qtLiteral(conn) }};

View File

@ -10,4 +10,4 @@ ALTER DOMAIN {{ conn|qtIdent(o_data.nspname, o_data.relname) }}
COMMENT ON CONSTRAINT {{ conn|qtIdent(name) }} ON DOMAIN {{ conn|qtIdent(o_data.nspname, o_data.relname) }}
IS {{ data.description|qtLiteral }};{% endif %}
IS {{ data.description|qtLiteral(conn) }};{% endif %}

View File

@ -46,7 +46,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -86,7 +86,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(True, 'Mocking the scalar output'), (False, 'Mocked Internal Server Error while creating domain')"
},
"expected_data": {
@ -131,7 +131,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while deleting a domain')"
},
"expected_data": {
@ -145,7 +145,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while deleting a domain')"
},
"expected_data": {
@ -230,7 +230,7 @@
"invalid": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain constraints nodes')"
},
"expected_data": {
@ -270,7 +270,7 @@
"node": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain nodes')"
},
"expected_data": {
@ -347,7 +347,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "[(False, 'Mocked Internal Server Error while fetching a domain')]"
},
"expected_data": {
@ -409,7 +409,7 @@
"internal_server_error": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain')"
},
"expected_data": {
@ -499,7 +499,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain')"
},
"expected_data": {
@ -536,7 +536,7 @@
"domain_constraint_list": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain constraint')"
},
"expected_data": {

View File

@ -28,6 +28,7 @@ class DomainConstraintAddTestCase(BaseTestGenerator):
domain_cons_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.schema_id = schema_info["schema_id"]

View File

@ -28,6 +28,7 @@ class DomainConstraintDeleteTestCase(BaseTestGenerator):
domain_cons_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.schema_id = schema_info["schema_id"]

View File

@ -27,6 +27,7 @@ class DomainConstraintGetTestCase(BaseTestGenerator):
domain_cons_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.schema_id = schema_info["schema_id"]

View File

@ -28,6 +28,7 @@ class DomainConstraintMsqlTestCase(BaseTestGenerator):
domain_cons_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.schema_id = schema_info["schema_id"]

View File

@ -28,6 +28,7 @@ class DomainConstraintNodeAndNodesTestCase(BaseTestGenerator):
domain_cons_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.schema_id = schema_info["schema_id"]

View File

@ -28,6 +28,7 @@ class DomainConstraintPutTestCase(BaseTestGenerator):
domain_cons_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.schema_id = schema_info["schema_id"]

View File

@ -28,6 +28,7 @@ class DomainConstraintGetSqlTestCase(BaseTestGenerator):
domain_cons_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.schema_id = schema_info["schema_id"]

View File

@ -13,6 +13,6 @@ FROM
JOIN
pg_catalog.pg_namespace bn ON bn.oid=d.typnamespace
WHERE
bn.nspname = {{ basensp|qtLiteral }}
AND d.typname={{ name|qtLiteral }};
bn.nspname = {{ basensp|qtLiteral(conn) }}
AND d.typname={{ name|qtLiteral(conn) }};
{% endif %}

View File

@ -44,7 +44,7 @@ ALTER DOMAIN {{ conn|qtIdent(o_data.basensp, name) }}
{% if c.description is defined and c.description != '' %}
COMMENT ON CONSTRAINT {{ conn|qtIdent(c.conname) }} ON DOMAIN {{ conn|qtIdent(o_data.basensp, name) }}
IS {{ c.description|qtLiteral }};{% endif %}
IS {{ c.description|qtLiteral(conn) }};{% endif %}
{% endfor -%}
{% else %}
{% for c in data.constraints.changed %}
@ -92,7 +92,7 @@ COMMENT ON CONSTRAINT {{ conn|qtIdent(c.conname) }} ON DOMAIN {{ conn|qtIdent(o_
{% endif -%}{% if data.description is defined and data.description != o_data.description %}
COMMENT ON DOMAIN {{ conn|qtIdent(o_data.basensp, name) }}
IS {{ data.description|qtLiteral }};
IS {{ data.description|qtLiteral(conn) }};
{% endif -%}{% if data.basensp %}
ALTER DOMAIN {{ conn|qtIdent(o_data.basensp, name) }}

View File

@ -52,7 +52,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -86,7 +86,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(True, 'Mocking the scalar output'), (False, 'Mocked Internal Server Error while creating domain')"
},
"expected_data": {
@ -120,7 +120,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(True, True),(True, True), (False, 'Mocked Internal Server Error while creating domain')"
},
"expected_data": {
@ -148,7 +148,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while deleting a domain')"
},
"expected_data": {
@ -162,7 +162,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while deleting a domain')"
},
"expected_data": {
@ -208,7 +208,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain')"
},
"expected_data": {
@ -226,7 +226,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_scalar",
"return_value": "[(True, True),(False, 'Mocked Internal Server Error while fetching a domain')]"
},
"expected_data": {
@ -288,7 +288,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
@ -319,7 +319,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain nodes')"
},
"expected_data": {
@ -345,7 +345,7 @@
"node": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_2darray",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain nodes')"
},
"expected_data": {
@ -383,7 +383,7 @@
"is_positive_test": false,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain')"
},
"expected_data": {
@ -409,7 +409,7 @@
"domain_list": true,
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error while fetching a domain')"
},
"expected_data": {
@ -518,7 +518,7 @@
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {

View File

@ -28,6 +28,7 @@ class DomainAddTestCase(BaseTestGenerator):
domain_utils.test_cases)
def setUp(self):
super().setUp()
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.schema_id = schema_info["schema_id"]

View File

@ -26,6 +26,7 @@ class DomainDeleteTestCase(BaseTestGenerator):
domain_utils.test_cases)
def setUp(self):
super().setUp()
self.database_info = parent_node_dict["database"][-1]
self.db_name = self.database_info["db_name"]
self.db_id = self.database_info["db_id"]

View File

@ -26,6 +26,7 @@ class DomainGetTestCase(BaseTestGenerator):
domain_utils.test_cases)
def setUp(self):
super().setUp()
self.database_info = parent_node_dict["database"][-1]
self.db_name = self.database_info["db_name"]
self.db_id = self.database_info["db_id"]

View File

@ -28,6 +28,7 @@ class DomainMsqlTestCase(BaseTestGenerator):
domain_utils.test_cases)
def setUp(self):
super().setUp()
self.database_info = parent_node_dict["database"][-1]
self.db_name = self.database_info["db_name"]
self.db_id = self.database_info["db_id"]

View File

@ -27,6 +27,7 @@ class DomainPutTestCase(BaseTestGenerator):
domain_utils.test_cases)
def setUp(self):
super().setUp()
self.database_info = parent_node_dict["database"][-1]
self.db_id = self.database_info["db_id"]
self.server_id = self.database_info["server_id"]

View File

@ -29,6 +29,7 @@ class DomainReverseEngineeredSQLTestCase(BaseTestGenerator):
domain_utils.test_cases)
def setUp(self):
super().setUp()
self.database_info = parent_node_dict["database"][-1]
self.db_name = self.database_info["db_name"]
self.schema_info = parent_node_dict["schema"][-1]

View File

@ -714,7 +714,8 @@ class ForeignTableView(PGChildNodeView, DataTypeReader,
SQL = render_template("/".join([self.template_path,
self._OID_SQL]),
basensp=basensp,
name=self.request['name'])
name=self.request['name'],
conn=self.conn)
status, res = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -826,7 +827,8 @@ class ForeignTableView(PGChildNodeView, DataTypeReader,
SQL = render_template("/".join([self.template_path,
self._OID_SQL]),
foid=foid)
foid=foid,
conn=self.conn)
status, res = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=res)
@ -884,7 +886,8 @@ class ForeignTableView(PGChildNodeView, DataTypeReader,
SQL = render_template("/".join([self.template_path,
self._CREATE_SQL]),
data=data, is_sql=True,
add_not_exists_clause=True
add_not_exists_clause=True,
conn=self.conn
)
if not json_resp:
@ -1071,11 +1074,11 @@ class ForeignTableView(PGChildNodeView, DataTypeReader,
sql = render_template(
"/".join([self.template_path,
'foreign_table_schema_diff.sql']),
data=data, o_data=old_data)
data=data, o_data=old_data, conn=self.conn)
else:
sql = render_template(
"/".join([self.template_path, self._UPDATE_SQL]),
data=data, o_data=old_data
data=data, o_data=old_data, conn=self.conn
)
return sql, data['name'] if 'name' in data else old_data['name']
else:
@ -1087,7 +1090,8 @@ class ForeignTableView(PGChildNodeView, DataTypeReader,
["a", "r", "w", "x"])
sql = render_template("/".join([self.template_path,
self._CREATE_SQL]), data=data)
self._CREATE_SQL]), data=data,
conn=self.conn)
return sql, data['name']
@check_precondition

View File

@ -9,7 +9,7 @@ CREATE FOREIGN TABLE{% if add_not_exists_clause %} IF NOT EXISTS{% endif %} {{ c
{% if is_columns.append('1') %}{% endif %}
{{conn|qtIdent(c.attname)}} {% if is_sql %}{{ c.fulltype }}{% else %}{{c.datatype }}{% if c.typlen %}({{c.typlen}}{% if c.precision %}, {{c.precision}}{% endif %}){% endif %}{% if c.isArrayType %}[]{% endif %}{% endif %}{% if c.coloptions %}
{% for o in c.coloptions %}{% if o.option is defined and o.value is defined %}
{% if loop.first %} OPTIONS ({% endif %}{% if not loop.first %}, {% endif %}{{o.option}} {{o.value|qtLiteral}}{% if loop.last %}){% endif %}{% endif %}
{% if loop.first %} OPTIONS ({% endif %}{% if not loop.first %}, {% endif %}{{o.option}} {{o.value|qtLiteral(conn)}}{% if loop.last %}){% endif %}{% endif %}
{% endfor %}{% endif %}
{% if c.attnotnull %} NOT NULL{% else %} NULL{% endif %}
{% if c.typdefault is defined and c.typdefault is not none %} DEFAULT {{c.typdefault}}{% endif %}
@ -28,7 +28,7 @@ CREATE FOREIGN TABLE{% if add_not_exists_clause %} IF NOT EXISTS{% endif %} {{ c
{% for o in data.ftoptions %}
{% if o.option is defined and o.value is defined %}
{% if loop.first %} OPTIONS ({% endif %}{% if not loop.first %}, {% endif %}{{o.option}} {{o.value|qtLiteral}}{% if loop.last %}){% endif %}{% endif %}
{% if loop.first %} OPTIONS ({% endif %}{% if not loop.first %}, {% endif %}{{o.option}} {{o.value|qtLiteral(conn)}}{% if loop.last %}){% endif %}{% endif %}
{% endfor %}{% endif %};
{% if data.owner %}

View File

@ -18,7 +18,7 @@ CREATE FOREIGN TABLE {{ conn|qtIdent(o_data.basensp, o_data.name) }}(
{% if is_columns.append('1') %}{% endif %}
{{conn|qtIdent(c.attname)}} {% if is_sql %}{{ c.fulltype }}{% else %}{{c.datatype }}{% if c.typlen %}({{c.typlen}}{% if c.precision %}, {{c.precision}}{% endif %}){% endif %}{% if c.isArrayType %}[]{% endif %}{% endif %}{% if c.coloptions %}
{% for o in c.coloptions %}{% if o.option is defined and o.value is defined %}
{% if loop.first %} OPTIONS ({% endif %}{% if not loop.first %}, {% endif %}{{o.option}} {{o.value|qtLiteral}}{% if loop.last %}){% endif %}{% endif %}
{% if loop.first %} OPTIONS ({% endif %}{% if not loop.first %}, {% endif %}{{o.option}} {{o.value|qtLiteral(conn)}}{% if loop.last %}){% endif %}{% endif %}
{% endfor %}{% endif %}
{% if c.attnotnull %} NOT NULL{% else %} NULL{% endif %}
{% if c.typdefault is defined and c.typdefault is not none %} DEFAULT {{c.typdefault}}{% endif %}
@ -37,7 +37,7 @@ CREATE FOREIGN TABLE {{ conn|qtIdent(o_data.basensp, o_data.name) }}(
{% for o in ftoptions %}
{% if o.option is defined and o.value is defined %}
{% if loop.first %} OPTIONS ({% endif %}{% if not loop.first %}, {% endif %}{{o.option}} {{o.value|qtLiteral}}{% if loop.last %}){% endif %}{% endif %}
{% if loop.first %} OPTIONS ({% endif %}{% if not loop.first %}, {% endif %}{{o.option}} {{o.value|qtLiteral(conn)}}{% if loop.last %}){% endif %}{% endif %}
{% endfor %}{% endif %};
{% if data.owner or o_data.owner%}

View File

@ -6,8 +6,8 @@ FROM
JOIN
pg_catalog.pg_namespace bn ON bn.oid=c.relnamespace
WHERE
bn.nspname = {{ basensp|qtLiteral }}
AND c.relname={{ name|qtLiteral }};
bn.nspname = {{ basensp|qtLiteral(conn) }}
AND c.relname={{ name|qtLiteral(conn) }};
{% elif foid %}
SELECT

Some files were not shown because too many files have changed in this diff Show More