Mirror of https://github.com/pgadmin-org/pgadmin4.git (synced 2024-11-22 08:46:39 -06:00)
Fixed 'convention' related code smell reported by SonarQube.
This commit is contained in:
parent fcf720a5d8
commit 45a03dd663
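The change is mechanical: names flagged by SonarQube as violating Python naming conventions are rewritten in PEP 8 snake_case (methods such as BackendSupported become backend_supported, helpers such as getSize become getsize, locals such as newName become new_name), and every call site is updated in the same commit. A minimal sketch of the pattern follows; the ExampleModule class and the deprecation shim are illustrative assumptions only, not code from pgAdmin, which simply renames the methods and their callers outright.

import warnings


class ExampleModule:
    """Illustrative stand-in for a pgAdmin module class."""

    def backend_supported(self, manager, **kwargs):
        # New PEP 8 snake_case name; all call sites use this after the rename.
        return getattr(manager, 'server_type', None) != 'gpdb'

    def BackendSupported(self, manager, **kwargs):
        # Hypothetical backwards-compatibility alias for the old CamelCase
        # name. The actual commit does not keep such a shim.
        warnings.warn(
            "BackendSupported() is deprecated; use backend_supported()",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.backend_supported(manager, **kwargs)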
@@ -717,8 +717,8 @@ def create_app(app_name=None):
     from flask_compress import Compress
     Compress(app)
 
-    from pgadmin.misc.themes import Themes
-    Themes(app)
+    from pgadmin.misc.themes import themes
+    themes(app)
 
     @app.context_processor
     def inject_blueprint():
@@ -17,7 +17,7 @@ from unittest.mock import MagicMock, Mock
 
 class TestExternalTablesModule(BaseTestGenerator):
     scenarios = [
-        ('#BackendSupported When access the on a Postgresql Database, '
+        ('#backend_supported When access the on a Postgresql Database, '
          'it returns false',
          dict(
              test_type='backend-support',
@@ -27,8 +27,8 @@ class TestExternalTablesModule(BaseTestGenerator):
              ),
              expected_result=False,
          )),
-        ('#BackendSupported When access the on a Postgres Plus Advance Server '
-         'Database, it returns false',
+        ('#backend_supported When access the on a Postgres Plus Advance '
+         'Server Database, it returns false',
          dict(
              test_type='backend-support',
              manager=dict(
@@ -37,7 +37,7 @@ class TestExternalTablesModule(BaseTestGenerator):
              ),
              expected_result=False,
          )),
-        ('#BackendSupported When access the on a GreenPlum Database, '
+        ('#backend_supported When access the on a GreenPlum Database, '
          'it returns true',
          dict(
              test_type='backend-support',
@@ -82,7 +82,7 @@ class TestExternalTablesModule(BaseTestGenerator):
         module = ExternalTablesModule('something')
         self.assertEquals(
             self.expected_result,
-            module.BackendSupported(manager)
+            module.backend_supported(manager)
         )
 
     def __test_get_nodes(self):
@@ -933,7 +933,7 @@ It may have been removed by another user.
         if isinstance(module, PGChildModule):
             if (
                 self.manager is not None and
-                module.BackendSupported(
+                module.backend_supported(
                     self.manager, **backend_support_keywords
                 )
             ):
@@ -73,14 +73,14 @@ class CompoundTriggerModule(CollectionNodeModule):
         self.min_gpdbver = 1000000000
         self.server_type = ['ppas']
 
-    def BackendSupported(self, manager, **kwargs):
+    def backend_supported(self, manager, **kwargs):
         """
         Load this module if vid is view, we will not load it under
         material view
         """
         if manager.server_type == 'gpdb':
             return False
-        if super(CompoundTriggerModule, self).BackendSupported(
+        if super(CompoundTriggerModule, self).backend_supported(
                 manager, **kwargs):
             conn = manager.connection(did=kwargs['did'])
 
@@ -68,12 +68,12 @@ class IndexesModule(CollectionNodeModule):
         self.max_ver = None
         super(IndexesModule, self).__init__(*args, **kwargs)
 
-    def BackendSupported(self, manager, **kwargs):
+    def backend_supported(self, manager, **kwargs):
         """
         Load this module if vid is view, we will not load it under
         material view
         """
-        if super(IndexesModule, self).BackendSupported(manager, **kwargs):
+        if super(IndexesModule, self).backend_supported(manager, **kwargs):
             conn = manager.connection(did=kwargs['did'])
 
             # If PG version > 100000 and < 110000 then index is
@@ -30,7 +30,7 @@ from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
 
 def backend_supported(module, manager, **kwargs):
 
-    if CollectionNodeModule.BackendSupported(module, manager, **kwargs):
+    if CollectionNodeModule.backend_supported(module, manager, **kwargs):
         if 'tid' not in kwargs:
             return True
 
@@ -110,7 +110,7 @@ class PartitionsModule(CollectionNodeModule):
         """
         return True
 
-    def BackendSupported(self, manager, **kwargs):
+    def backend_supported(self, manager, **kwargs):
         """
         Load this module if it is a partition table
         """
@@ -239,13 +239,13 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings,
         for module in self.blueprint.submodules:
             if isinstance(module, PGChildModule):
                 if manager is not None and \
-                        module.BackendSupported(manager, **kwargs):
+                        module.backend_supported(manager, **kwargs):
                     nodes.extend(module.get_nodes(**kwargs))
             else:
                 nodes.extend(module.get_nodes(**kwargs))
 
         if manager is not None and \
-                self.blueprint.BackendSupported(manager, **kwargs):
+                self.blueprint.backend_supported(manager, **kwargs):
             nodes.extend(self.blueprint.get_nodes(**kwargs))
 
         return nodes
@@ -111,10 +111,10 @@ class TestBackendSupport(BaseTestGenerator):
         connection_mock.execute_scalar.return_value = \
             self.connection_execution_return_value
         module.manager.connection.return_value = connection_mock
-        CollectionNodeModule_mock.BackendSupported.return_value = \
+        CollectionNodeModule_mock.backend_supported.return_value = \
             self.collection_node_active
 
-        result = module.BackendSupported(
+        result = module.backend_supported(
             module.manager, **self.input_arguments
         )
 
@@ -50,12 +50,12 @@ class RuleModule(CollectionNodeModule):
 
         super(RuleModule, self).__init__(*args, **kwargs)
 
-    def BackendSupported(self, manager, **kwargs):
+    def backend_supported(self, manager, **kwargs):
         """
         Load this module if tid is view, we will not load it under
         material view
         """
-        if super(RuleModule, self).BackendSupported(manager, **kwargs):
+        if super(RuleModule, self).backend_supported(manager, **kwargs):
             conn = manager.connection(did=kwargs['did'])
 
             if 'vid' not in kwargs:
@@ -70,14 +70,14 @@ class TriggerModule(CollectionNodeModule):
         self.min_gpdbver = 1000000000
         super(TriggerModule, self).__init__(*args, **kwargs)
 
-    def BackendSupported(self, manager, **kwargs):
+    def backend_supported(self, manager, **kwargs):
         """
         Load this module if vid is view, we will not load it under
         material view
         """
         if manager.server_type == 'gpdb':
             return False
-        if super(TriggerModule, self).BackendSupported(manager, **kwargs):
+        if super(TriggerModule, self).backend_supported(manager, **kwargs):
             conn = manager.connection(did=kwargs['did'])
 
             if 'vid' not in kwargs:
@@ -45,7 +45,7 @@ class SchemaChildModule(CollectionNodeModule):
     CATALOG_DB_SUPPORTED = True
     SUPPORTED_SCHEMAS = None
 
-    def BackendSupported(self, manager, **kwargs):
+    def backend_supported(self, manager, **kwargs):
         return (
             (
                 (
@@ -70,7 +70,7 @@ class SchemaChildModule(CollectionNodeModule):
                     not kwargs['is_catalog'] and self.CATALOG_DB_SUPPORTED
                 )
             ) and
-            CollectionNodeModule.BackendSupported(self, manager, **kwargs)
+            CollectionNodeModule.backend_supported(self, manager, **kwargs)
         )
 
     @property
@@ -47,7 +47,7 @@ class JobModule(CollectionNodeModule):
         """
         return servers.ServerModule.NODE_TYPE
 
-    def BackendSupported(self, manager, **kwargs):
+    def backend_supported(self, manager, **kwargs):
         if hasattr(self, 'show_node') and not self.show_node:
             return False
 
@@ -37,7 +37,7 @@ class ResourceGroupModule(CollectionNodeModule):
       - Method is used to initialize the ResourceGroupModule and it's
         base module.
 
-    * BackendSupported(manager, **kwargs)
+    * backend_supported(manager, **kwargs)
       - This function is used to check the database server type and version.
         Resource Group only supported in PPAS 9.4 and above.
 
@@ -44,5 +44,5 @@ class BackendSupportedTestCase(BaseTestGenerator):
         manager.sversion = self.manager['sversion']
         manager.server_type = self.manager['server_type']
         self.assertEquals(
-            self.expected_result, module.BackendSupported(manager)
+            self.expected_result, module.backend_supported(manager)
         )
@@ -95,7 +95,7 @@ class PGChildModule(object):
 
     Method:
     ------
-    * BackendSupported(manager)
+    * backend_supported(manager)
       - Return True when it supports certain version.
         Uses the psycopg2 server connection manager as input for checking the
         compatibility of the current module.
@@ -112,7 +112,7 @@ class PGChildModule(object):
 
         super(PGChildModule, self).__init__()
 
-    def BackendSupported(self, manager, **kwargs):
+    def backend_supported(self, manager, **kwargs):
         if hasattr(self, 'show_node'):
             if not self.show_node:
                 return False
@@ -385,7 +385,7 @@ class PGChildNodeView(NodeView):
         if isinstance(module, PGChildModule):
             if (
                 manager is not None and
-                module.BackendSupported(manager, **kwargs)
+                module.backend_supported(manager, **kwargs)
             ):
                 nodes.extend(module.get_nodes(**kwargs))
             else:
@@ -43,7 +43,6 @@ _IS_WIN = (os.name == 'nt')
 _ZERO = timedelta(0)
 _sys_encoding = None
 _fs_encoding = None
-_u = None
 _out_dir = None
 _log_file = None
 
@@ -321,13 +320,13 @@ def execute(argv):
 
         _log('Waiting for the process to finish...')
         # Child process return code
-        exitCode = process.wait()
+        exit_code = process.wait()
 
-        if exitCode is None:
-            exitCode = process.poll()
+        if exit_code is None:
+            exit_code = process.poll()
 
-        _log('Process exited with code: {0}'.format(exitCode))
-        args.update({'exit_code': exitCode})
+        _log('Process exited with code: {0}'.format(exit_code))
+        args.update({'exit_code': exit_code})
 
         # Add end_time
         args.update({'end_time': get_current_time()})
@@ -415,11 +414,7 @@ if __name__ == '__main__':
         # encoding or 'ascii'.
         _fs_encoding = 'utf-8'
 
-    def u(_s, _encoding=_sys_encoding):
-        return _s
-    _u = u
-
-    _out_dir = u(os.environ['OUTDIR'])
+    _out_dir = os.environ['OUTDIR']
     _log_file = os.path.join(_out_dir, ('log_%s' % os.getpid()))
 
     _log('Starting the process executor...')
@@ -70,12 +70,12 @@ def sizeof_fmt(num, suffix='B'):
 
 
 # return size of file
-def getSize(path):
+def getsize(path):
     st = os.stat(path)
     return st.st_size
 
 
-def getDriveSize(path):
+def getdrivesize(path):
     if _platform == "win32":
         free_bytes = ctypes.c_ulonglong(0)
         ctypes.windll.kernel32.GetDiskFreeSpaceExW(
@@ -581,7 +581,7 @@ class Filemanager(object):
                 protected = 0
                 path = file_name = u"{0}:".format(drive)
                 try:
-                    drive_size = getDriveSize(path)
+                    drive_size = getdrivesize(path)
                     drive_size_in_units = sizeof_fmt(drive_size)
                 except Exception:
                     drive_size = 0
@@ -662,7 +662,7 @@ class Filemanager(object):
                     "Properties": {
                         "Date Created": created,
                         "Date Modified": modified,
-                        "Size": sizeof_fmt(getSize(system_path))
+                        "Size": sizeof_fmt(getsize(system_path))
                     }
                 }
             except Exception as e:
@@ -811,7 +811,7 @@ class Filemanager(object):
 
         thefile['Properties']['Date Created'] = created
         thefile['Properties']['Date Modified'] = modified
-        thefile['Properties']['Size'] = sizeof_fmt(getSize(orig_path))
+        thefile['Properties']['Size'] = sizeof_fmt(getsize(orig_path))
 
         return thefile
 
@@ -962,9 +962,9 @@ class Filemanager(object):
                 'utf-8').decode('utf-8')
             file_name = file_obj.filename.encode('utf-8').decode('utf-8')
             orig_path = u"{0}{1}".format(dir, path)
-            newName = u"{0}{1}".format(orig_path, file_name)
+            new_name = u"{0}{1}".format(orig_path, file_name)
 
-            with open(newName, 'wb') as f:
+            with open(new_name, 'wb') as f:
                 while True:
                     # 4MB chunk (4 * 1024 * 1024 Bytes)
                     data = file_obj.read(4194304)
@@ -987,7 +987,7 @@ class Filemanager(object):
 
         result = {
             'Path': path,
-            'Name': newName,
+            'Name': new_name,
             'Error': err_msg,
             'Code': code
         }
@@ -1011,8 +1011,8 @@ class Filemanager(object):
             Filemanager.check_access_permission(
                 dir, u"{}{}".format(path, name))
 
-            newName = u"{0}{1}".format(orig_path, name)
-            if not os.path.exists(newName):
+            new_name = u"{0}{1}".format(orig_path, name)
+            if not os.path.exists(new_name):
                 code = 0
         except Exception as e:
             code = 0
@@ -1031,23 +1031,23 @@ class Filemanager(object):
         return result
 
     @staticmethod
-    def get_new_name(dir, path, newName, count=1):
+    def get_new_name(dir, path, new_name, count=1):
         """
         Utility to provide new name for folder if file
         with same name already exists
         """
-        last_char = newName[-1]
-        tnewPath = u"{}/{}{}_{}".format(dir, path, newName, count)
-        if last_char == 'r' and not path_exists(tnewPath):
-            return tnewPath, newName
+        last_char = new_name[-1]
+        t_new_path = u"{}/{}{}_{}".format(dir, path, new_name, count)
+        if last_char == 'r' and not path_exists(t_new_path):
+            return t_new_path, new_name
         else:
-            last_char = int(tnewPath[-1]) + 1
-            newPath = u"{}/{}{}_{}".format(dir, path, newName, last_char)
-            if path_exists(newPath):
+            last_char = int(t_new_path[-1]) + 1
+            new_path = u"{}/{}{}_{}".format(dir, path, new_name, last_char)
+            if path_exists(new_path):
                 count += 1
-                return Filemanager.get_new_name(dir, path, newName, count)
+                return Filemanager.get_new_name(dir, path, new_name, count)
             else:
-                return newPath, newName
+                return new_path, new_name
 
     @staticmethod
     def check_file_for_bom_and_binary(filename, enc="utf-8"):
@@ -1145,30 +1145,30 @@ class Filemanager(object):
             return res
 
         if dir != "":
-            newPath = u"{}/{}{}/".format(dir, path, name)
+            new_path = u"{}/{}{}/".format(dir, path, name)
         else:
-            newPath = u"{}{}/".format(path, name)
+            new_path = u"{}{}/".format(path, name)
 
         err_msg = ''
         code = 1
-        newName = name
-        if not path_exists(newPath):
+        new_name = name
+        if not path_exists(new_path):
             try:
-                os.mkdir(newPath)
+                os.mkdir(new_path)
             except Exception as e:
                 code = 0
                 err_msg = gettext(u"Error: {0}").format(e.strerror)
         else:
-            newPath, newName = self.get_new_name(dir, path, name)
+            new_path, new_name = self.get_new_name(dir, path, name)
             try:
-                os.mkdir(newPath)
+                os.mkdir(new_path)
             except Exception as e:
                 code = 0
                 err_msg = gettext(u"Error: {0}").format(e.strerror)
 
         result = {
             'Parent': path,
-            'Name': newName,
+            'Name': new_name,
             'Error': err_msg,
             'Code': code
         }
@@ -1233,7 +1233,7 @@ def file_manager(trans_id):
     It gets unique transaction id from post request and
     rotate it into Filemanager class.
    """
-    myFilemanager = Filemanager(trans_id)
+    my_fm = Filemanager(trans_id)
     mode = ''
     kwargs = {}
     if req.method == 'POST':
@@ -1253,8 +1253,8 @@ def file_manager(trans_id):
         mode = req.args['mode']
 
     try:
-        func = getattr(myFilemanager, mode)
+        func = getattr(my_fm, mode)
         res = func(**kwargs)
         return make_json_response(data={'result': res, 'status': True})
     except Exception:
-        return getattr(myFilemanager, mode)(**kwargs)
+        return getattr(my_fm, mode)(**kwargs)
@@ -27,7 +27,7 @@ def get_all_themes():
     return all_themes
 
 
-def Themes(app):
+def themes(app):
     @app.context_processor
     def inject_theme_func():
         def get_theme_css():
@@ -314,9 +314,9 @@ def initialize_query_tool(trans_id, sgid, sid, did=None):
     if request.data:
         _ = request.data
 
-    reqArgs = request.args
-    if ('recreate' in reqArgs and
-            reqArgs['recreate'] == '1'):
+    req_args = request.args
+    if ('recreate' in req_args and
+            req_args['recreate'] == '1'):
         connect = False
 
     # Create asynchronous connection using random connection id.
@@ -1672,7 +1672,7 @@ def get_arguments_sqlite(sid, did, scid, func_id):
     """
 
     """Get the count of the existing data available in sqlite database"""
-    DbgFuncArgsCount = DebuggerFunctionArguments.query.filter_by(
+    dbg_func_args_count = DebuggerFunctionArguments.query.filter_by(
         server_id=sid,
         database_id=did,
         schema_id=scid,
@@ -1681,18 +1681,18 @@ def get_arguments_sqlite(sid, did, scid, func_id):
 
     args_data = []
 
-    if DbgFuncArgsCount:
+    if dbg_func_args_count:
         """Update the Debugger Function Arguments settings"""
-        DbgFuncArgs = DebuggerFunctionArguments.query.filter_by(
+        dbg_func_args = DebuggerFunctionArguments.query.filter_by(
             server_id=sid,
            database_id=did,
            schema_id=scid,
            function_id=func_id
        )
 
-        args_list = DbgFuncArgs.all()
+        args_list = dbg_func_args.all()
 
-        for i in range(0, DbgFuncArgsCount):
+        for i in range(0, dbg_func_args_count):
            info = {
                "arg_id": args_list[i].arg_id,
                "is_null": args_list[i].is_null,
@@ -1705,13 +1705,13 @@ def get_arguments_sqlite(sid, did, scid, func_id):
         # As we do have entry available for that function so we need to add
         # that entry
         return make_json_response(
-            data={'result': args_data, 'args_count': DbgFuncArgsCount}
+            data={'result': args_data, 'args_count': dbg_func_args_count}
         )
     else:
         # As we do not have any entry available for that function so we need
         # to add that entry
         return make_json_response(
-            data={'result': 'result', 'args_count': DbgFuncArgsCount}
+            data={'result': 'result', 'args_count': dbg_func_args_count}
         )
 
 
@@ -1743,7 +1743,7 @@ def set_arguments_sqlite(sid, did, scid, func_id):
 
     try:
         for i in range(0, len(data)):
-            DbgFuncArgsExists = DebuggerFunctionArguments.query.filter_by(
+            dbg_func_args_exists = DebuggerFunctionArguments.query.filter_by(
                 server_id=data[i]['server_id'],
                 database_id=data[i]['database_id'],
                 schema_id=data[i]['schema_id'],
@@ -1771,8 +1771,8 @@ def set_arguments_sqlite(sid, did, scid, func_id):
 
             # Check if data is already available in database then update the
             # existing value otherwise add the new value
-            if DbgFuncArgsExists:
-                DbgFuncArgs = DebuggerFunctionArguments.query.filter_by(
+            if dbg_func_args_exists:
+                dbg_func_args = DebuggerFunctionArguments.query.filter_by(
                     server_id=data[i]['server_id'],
                     database_id=data[i]['database_id'],
                     schema_id=data[i]['schema_id'],
@@ -1780,10 +1780,10 @@ def set_arguments_sqlite(sid, did, scid, func_id):
                     arg_id=data[i]['arg_id']
                 ).first()
 
-                DbgFuncArgs.is_null = data[i]['is_null']
-                DbgFuncArgs.is_expression = data[i]['is_expression']
-                DbgFuncArgs.use_default = data[i]['use_default']
-                DbgFuncArgs.value = array_string
+                dbg_func_args.is_null = data[i]['is_null']
+                dbg_func_args.is_expression = data[i]['is_expression']
+                dbg_func_args.use_default = data[i]['use_default']
+                dbg_func_args.value = array_string
             else:
                 debugger_func_args = DebuggerFunctionArguments(
                     server_id=data[i]['server_id'],
@@ -376,7 +376,7 @@ def msql(sid, did):
                               acls['table']['acl'])
 
         # Pass database objects and get SQL for privileges
-        SQL_data = ''
+        sql_data = ''
         data_func = {'objects': data['objects'],
                      'priv': data['priv']['function']}
         SQL = render_template(
@@ -384,7 +384,7 @@ def msql(sid, did):
                       '/sql/grant_function.sql']),
             data=data_func, conn=conn)
         if SQL and SQL.strip('\n') != '':
-            SQL_data += SQL
+            sql_data += SQL
 
         data_seq = {'objects': data['objects'],
                     'priv': data['priv']['sequence']}
@@ -393,7 +393,7 @@ def msql(sid, did):
                       '/sql/grant_sequence.sql']),
             data=data_seq, conn=conn)
         if SQL and SQL.strip('\n') != '':
-            SQL_data += SQL
+            sql_data += SQL
 
         data_table = {'objects': data['objects'],
                       'priv': data['priv']['table']}
@@ -401,9 +401,9 @@ def msql(sid, did):
             "/".join([server_prop['template_path'], '/sql/grant_table.sql']),
             data=data_table, conn=conn)
         if SQL and SQL.strip('\n') != '':
-            SQL_data += SQL
+            sql_data += SQL
 
-        res = {'data': SQL_data}
+        res = {'data': sql_data}
 
         return ajax_response(
             response=res,
@@ -464,7 +464,7 @@ def save(sid, did):
 
         # Pass database objects and get SQL for privileges
         # Pass database objects and get SQL for privileges
-        SQL_data = ''
+        sql_data = ''
         data_func = {'objects': data['objects'],
                      'priv': data['priv']['function']}
         SQL = render_template(
@@ -472,7 +472,7 @@ def save(sid, did):
                       '/sql/grant_function.sql']),
             data=data_func, conn=conn)
         if SQL and SQL.strip('\n') != '':
-            SQL_data += SQL
+            sql_data += SQL
 
         data_seq = {'objects': data['objects'],
                     'priv': data['priv']['sequence']}
@@ -481,7 +481,7 @@ def save(sid, did):
                       '/sql/grant_sequence.sql']),
             data=data_seq, conn=conn)
         if SQL and SQL.strip('\n') != '':
-            SQL_data += SQL
+            sql_data += SQL
 
         data_table = {'objects': data['objects'],
                       'priv': data['priv']['table']}
@@ -489,9 +489,9 @@ def save(sid, did):
             "/".join([server_prop['template_path'], '/sql/grant_table.sql']),
             data=data_table, conn=conn)
         if SQL and SQL.strip('\n') != '':
-            SQL_data += SQL
+            sql_data += SQL
 
-        status, res = conn.execute_dict(SQL_data)
+        status, res = conn.execute_dict(sql_data)
         if not status:
             return internal_server_error(errormsg=res)
 
@@ -86,7 +86,7 @@ class SearchObjectsHelperTest(BaseTestGenerator):
         for data in blueprints:
             if node_type == data['node_type']:
                 blueprint = MagicMock(
-                    BackendSupported=MagicMock(
+                    backend_supported=MagicMock(
                         return_value=data['backend_supported']),
                     collection_label=data['coll_label'],
                     show_node=data['backend_supported'],
@@ -71,8 +71,8 @@ class SearchObjectsHelper:
             if blueprint is None:
                 continue
 
-            if blueprint.BackendSupported(self.manager, is_catalog=False,
-                                          did=self.did) or skip_check:
+            if blueprint.backend_supported(self.manager, is_catalog=False,
+                                           did=self.did) or skip_check:
                 if node_type in ['edbfunc', 'edbproc']:
                     return_types[node_type] =\
                         gettext('Package {0}').format(