Mirror of https://github.com/pgadmin-org/pgadmin4.git (synced 2025-02-25 18:55:31 -06:00)

Commit 45a03dd663 (parent fcf720a5d8), committed by Akshay Joshi

Fixed 'convention' related code smell reported by SonarQube.
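The 'convention' smells fixed below are naming issues: CamelCase methods, functions and local variables (BackendSupported, getSize, exitCode, newName, SQL_data, ...) are renamed to PEP 8 snake_case (backend_supported, getsize, exit_code, new_name, sql_data, ...), with every definition, override and call site updated in the same commit. A minimal sketch of the pattern, using placeholder classes rather than the actual pgAdmin code:

# Illustrative stand-ins only; the real pgAdmin modules are far larger.
class CollectionNodeModule:
    # Before the commit this method was named BackendSupported, which
    # SonarQube reports as a naming-convention code smell.
    def backend_supported(self, manager, **kwargs):
        return True


class IndexesModule(CollectionNodeModule):
    def backend_supported(self, manager, **kwargs):
        # Overrides and their super() calls are renamed together,
        # otherwise the override would silently stop being called.
        return super().backend_supported(manager, **kwargs)


print(IndexesModule().backend_supported(manager=None))  # True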
@@ -717,8 +717,8 @@ def create_app(app_name=None):
 from flask_compress import Compress
 Compress(app)

-from pgadmin.misc.themes import Themes
-Themes(app)
+from pgadmin.misc.themes import themes
+themes(app)

 @app.context_processor
 def inject_blueprint():
@@ -17,7 +17,7 @@ from unittest.mock import MagicMock, Mock

 class TestExternalTablesModule(BaseTestGenerator):
 scenarios = [
-('#BackendSupported When access the on a Postgresql Database, '
+('#backend_supported When access the on a Postgresql Database, '
 'it returns false',
 dict(
 test_type='backend-support',
@@ -27,8 +27,8 @@ class TestExternalTablesModule(BaseTestGenerator):
 ),
 expected_result=False,
 )),
-('#BackendSupported When access the on a Postgres Plus Advance Server '
-'Database, it returns false',
+('#backend_supported When access the on a Postgres Plus Advance '
+'Server Database, it returns false',
 dict(
 test_type='backend-support',
 manager=dict(
@@ -37,7 +37,7 @@ class TestExternalTablesModule(BaseTestGenerator):
 ),
 expected_result=False,
 )),
-('#BackendSupported When access the on a GreenPlum Database, '
+('#backend_supported When access the on a GreenPlum Database, '
 'it returns true',
 dict(
 test_type='backend-support',
@@ -82,7 +82,7 @@ class TestExternalTablesModule(BaseTestGenerator):
 module = ExternalTablesModule('something')
 self.assertEquals(
 self.expected_result,
-module.BackendSupported(manager)
+module.backend_supported(manager)
 )

 def __test_get_nodes(self):
@@ -933,7 +933,7 @@ It may have been removed by another user.
 if isinstance(module, PGChildModule):
 if (
 self.manager is not None and
-module.BackendSupported(
+module.backend_supported(
 self.manager, **backend_support_keywords
 )
 ):
@@ -73,14 +73,14 @@ class CompoundTriggerModule(CollectionNodeModule):
 self.min_gpdbver = 1000000000
 self.server_type = ['ppas']

-def BackendSupported(self, manager, **kwargs):
+def backend_supported(self, manager, **kwargs):
 """
 Load this module if vid is view, we will not load it under
 material view
 """
 if manager.server_type == 'gpdb':
 return False
-if super(CompoundTriggerModule, self).BackendSupported(
+if super(CompoundTriggerModule, self).backend_supported(
 manager, **kwargs):
 conn = manager.connection(did=kwargs['did'])

@@ -68,12 +68,12 @@ class IndexesModule(CollectionNodeModule):
 self.max_ver = None
 super(IndexesModule, self).__init__(*args, **kwargs)

-def BackendSupported(self, manager, **kwargs):
+def backend_supported(self, manager, **kwargs):
 """
 Load this module if vid is view, we will not load it under
 material view
 """
-if super(IndexesModule, self).BackendSupported(manager, **kwargs):
+if super(IndexesModule, self).backend_supported(manager, **kwargs):
 conn = manager.connection(did=kwargs['did'])

 # If PG version > 100000 and < 110000 then index is
@@ -30,7 +30,7 @@ from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare

 def backend_supported(module, manager, **kwargs):

-if CollectionNodeModule.BackendSupported(module, manager, **kwargs):
+if CollectionNodeModule.backend_supported(module, manager, **kwargs):
 if 'tid' not in kwargs:
 return True

@@ -110,7 +110,7 @@ class PartitionsModule(CollectionNodeModule):
 """
 return True

-def BackendSupported(self, manager, **kwargs):
+def backend_supported(self, manager, **kwargs):
 """
 Load this module if it is a partition table
 """
@@ -239,13 +239,13 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings,
 for module in self.blueprint.submodules:
 if isinstance(module, PGChildModule):
 if manager is not None and \
-module.BackendSupported(manager, **kwargs):
+module.backend_supported(manager, **kwargs):
 nodes.extend(module.get_nodes(**kwargs))
 else:
 nodes.extend(module.get_nodes(**kwargs))

 if manager is not None and \
-self.blueprint.BackendSupported(manager, **kwargs):
+self.blueprint.backend_supported(manager, **kwargs):
 nodes.extend(self.blueprint.get_nodes(**kwargs))

 return nodes
@@ -111,10 +111,10 @@ class TestBackendSupport(BaseTestGenerator):
 connection_mock.execute_scalar.return_value = \
 self.connection_execution_return_value
 module.manager.connection.return_value = connection_mock
-CollectionNodeModule_mock.BackendSupported.return_value = \
+CollectionNodeModule_mock.backend_supported.return_value = \
 self.collection_node_active

-result = module.BackendSupported(
+result = module.backend_supported(
 module.manager, **self.input_arguments
 )

@@ -50,12 +50,12 @@ class RuleModule(CollectionNodeModule):

 super(RuleModule, self).__init__(*args, **kwargs)

-def BackendSupported(self, manager, **kwargs):
+def backend_supported(self, manager, **kwargs):
 """
 Load this module if tid is view, we will not load it under
 material view
 """
-if super(RuleModule, self).BackendSupported(manager, **kwargs):
+if super(RuleModule, self).backend_supported(manager, **kwargs):
 conn = manager.connection(did=kwargs['did'])

 if 'vid' not in kwargs:
@@ -70,14 +70,14 @@ class TriggerModule(CollectionNodeModule):
 self.min_gpdbver = 1000000000
 super(TriggerModule, self).__init__(*args, **kwargs)

-def BackendSupported(self, manager, **kwargs):
+def backend_supported(self, manager, **kwargs):
 """
 Load this module if vid is view, we will not load it under
 material view
 """
 if manager.server_type == 'gpdb':
 return False
-if super(TriggerModule, self).BackendSupported(manager, **kwargs):
+if super(TriggerModule, self).backend_supported(manager, **kwargs):
 conn = manager.connection(did=kwargs['did'])

 if 'vid' not in kwargs:
@@ -45,7 +45,7 @@ class SchemaChildModule(CollectionNodeModule):
 CATALOG_DB_SUPPORTED = True
 SUPPORTED_SCHEMAS = None

-def BackendSupported(self, manager, **kwargs):
+def backend_supported(self, manager, **kwargs):
 return (
 (
 (
@@ -70,7 +70,7 @@ class SchemaChildModule(CollectionNodeModule):
 not kwargs['is_catalog'] and self.CATALOG_DB_SUPPORTED
 )
 ) and
-CollectionNodeModule.BackendSupported(self, manager, **kwargs)
+CollectionNodeModule.backend_supported(self, manager, **kwargs)
 )

 @property
@@ -47,7 +47,7 @@ class JobModule(CollectionNodeModule):
 """
 return servers.ServerModule.NODE_TYPE

-def BackendSupported(self, manager, **kwargs):
+def backend_supported(self, manager, **kwargs):
 if hasattr(self, 'show_node') and not self.show_node:
 return False

@@ -37,7 +37,7 @@ class ResourceGroupModule(CollectionNodeModule):
 - Method is used to initialize the ResourceGroupModule and it's
 base module.

-* BackendSupported(manager, **kwargs)
+* backend_supported(manager, **kwargs)
 - This function is used to check the database server type and version.
 Resource Group only supported in PPAS 9.4 and above.

@@ -44,5 +44,5 @@ class BackendSupportedTestCase(BaseTestGenerator):
 manager.sversion = self.manager['sversion']
 manager.server_type = self.manager['server_type']
 self.assertEquals(
-self.expected_result, module.BackendSupported(manager)
+self.expected_result, module.backend_supported(manager)
 )
@@ -95,7 +95,7 @@ class PGChildModule(object):

 Method:
 ------
-* BackendSupported(manager)
+* backend_supported(manager)
 - Return True when it supports certain version.
 Uses the psycopg2 server connection manager as input for checking the
 compatibility of the current module.
@@ -112,7 +112,7 @@ class PGChildModule(object):

 super(PGChildModule, self).__init__()

-def BackendSupported(self, manager, **kwargs):
+def backend_supported(self, manager, **kwargs):
 if hasattr(self, 'show_node'):
 if not self.show_node:
 return False
@@ -385,7 +385,7 @@ class PGChildNodeView(NodeView):
 if isinstance(module, PGChildModule):
 if (
 manager is not None and
-module.BackendSupported(manager, **kwargs)
+module.backend_supported(manager, **kwargs)
 ):
 nodes.extend(module.get_nodes(**kwargs))
 else:
@@ -43,7 +43,6 @@ _IS_WIN = (os.name == 'nt')
 _ZERO = timedelta(0)
 _sys_encoding = None
 _fs_encoding = None
-_u = None
 _out_dir = None
 _log_file = None

@@ -321,13 +320,13 @@ def execute(argv):

 _log('Waiting for the process to finish...')
 # Child process return code
-exitCode = process.wait()
+exit_code = process.wait()

-if exitCode is None:
-exitCode = process.poll()
+if exit_code is None:
+exit_code = process.poll()

-_log('Process exited with code: {0}'.format(exitCode))
-args.update({'exit_code': exitCode})
+_log('Process exited with code: {0}'.format(exit_code))
+args.update({'exit_code': exit_code})

 # Add end_time
 args.update({'end_time': get_current_time()})
@@ -415,11 +414,7 @@ if __name__ == '__main__':
 # encoding or 'ascii'.
 _fs_encoding = 'utf-8'

-def u(_s, _encoding=_sys_encoding):
-return _s
-_u = u
-
-_out_dir = u(os.environ['OUTDIR'])
+_out_dir = os.environ['OUTDIR']
 _log_file = os.path.join(_out_dir, ('log_%s' % os.getpid()))

 _log('Starting the process executor...')
@@ -70,12 +70,12 @@ def sizeof_fmt(num, suffix='B'):


 # return size of file
-def getSize(path):
+def getsize(path):
 st = os.stat(path)
 return st.st_size


-def getDriveSize(path):
+def getdrivesize(path):
 if _platform == "win32":
 free_bytes = ctypes.c_ulonglong(0)
 ctypes.windll.kernel32.GetDiskFreeSpaceExW(
@@ -581,7 +581,7 @@ class Filemanager(object):
 protected = 0
 path = file_name = u"{0}:".format(drive)
 try:
-drive_size = getDriveSize(path)
+drive_size = getdrivesize(path)
 drive_size_in_units = sizeof_fmt(drive_size)
 except Exception:
 drive_size = 0
@@ -662,7 +662,7 @@ class Filemanager(object):
 "Properties": {
 "Date Created": created,
 "Date Modified": modified,
-"Size": sizeof_fmt(getSize(system_path))
+"Size": sizeof_fmt(getsize(system_path))
 }
 }
 except Exception as e:
@@ -811,7 +811,7 @@ class Filemanager(object):

 thefile['Properties']['Date Created'] = created
 thefile['Properties']['Date Modified'] = modified
-thefile['Properties']['Size'] = sizeof_fmt(getSize(orig_path))
+thefile['Properties']['Size'] = sizeof_fmt(getsize(orig_path))

 return thefile

@@ -962,9 +962,9 @@ class Filemanager(object):
 'utf-8').decode('utf-8')
 file_name = file_obj.filename.encode('utf-8').decode('utf-8')
 orig_path = u"{0}{1}".format(dir, path)
-newName = u"{0}{1}".format(orig_path, file_name)
+new_name = u"{0}{1}".format(orig_path, file_name)

-with open(newName, 'wb') as f:
+with open(new_name, 'wb') as f:
 while True:
 # 4MB chunk (4 * 1024 * 1024 Bytes)
 data = file_obj.read(4194304)
@@ -987,7 +987,7 @@ class Filemanager(object):

 result = {
 'Path': path,
-'Name': newName,
+'Name': new_name,
 'Error': err_msg,
 'Code': code
 }
@@ -1011,8 +1011,8 @@ class Filemanager(object):
 Filemanager.check_access_permission(
 dir, u"{}{}".format(path, name))

-newName = u"{0}{1}".format(orig_path, name)
-if not os.path.exists(newName):
+new_name = u"{0}{1}".format(orig_path, name)
+if not os.path.exists(new_name):
 code = 0
 except Exception as e:
 code = 0
@@ -1031,23 +1031,23 @@ class Filemanager(object):
 return result

 @staticmethod
-def get_new_name(dir, path, newName, count=1):
+def get_new_name(dir, path, new_name, count=1):
 """
 Utility to provide new name for folder if file
 with same name already exists
 """
-last_char = newName[-1]
-tnewPath = u"{}/{}{}_{}".format(dir, path, newName, count)
-if last_char == 'r' and not path_exists(tnewPath):
-return tnewPath, newName
+last_char = new_name[-1]
+t_new_path = u"{}/{}{}_{}".format(dir, path, new_name, count)
+if last_char == 'r' and not path_exists(t_new_path):
+return t_new_path, new_name
 else:
-last_char = int(tnewPath[-1]) + 1
-newPath = u"{}/{}{}_{}".format(dir, path, newName, last_char)
-if path_exists(newPath):
+last_char = int(t_new_path[-1]) + 1
+new_path = u"{}/{}{}_{}".format(dir, path, new_name, last_char)
+if path_exists(new_path):
 count += 1
-return Filemanager.get_new_name(dir, path, newName, count)
+return Filemanager.get_new_name(dir, path, new_name, count)
 else:
-return newPath, newName
+return new_path, new_name

 @staticmethod
 def check_file_for_bom_and_binary(filename, enc="utf-8"):
@@ -1145,30 +1145,30 @@ class Filemanager(object):
 return res

 if dir != "":
-newPath = u"{}/{}{}/".format(dir, path, name)
+new_path = u"{}/{}{}/".format(dir, path, name)
 else:
-newPath = u"{}{}/".format(path, name)
+new_path = u"{}{}/".format(path, name)

 err_msg = ''
 code = 1
-newName = name
-if not path_exists(newPath):
+new_name = name
+if not path_exists(new_path):
 try:
-os.mkdir(newPath)
+os.mkdir(new_path)
 except Exception as e:
 code = 0
 err_msg = gettext(u"Error: {0}").format(e.strerror)
 else:
-newPath, newName = self.get_new_name(dir, path, name)
+new_path, new_name = self.get_new_name(dir, path, name)
 try:
-os.mkdir(newPath)
+os.mkdir(new_path)
 except Exception as e:
 code = 0
 err_msg = gettext(u"Error: {0}").format(e.strerror)

 result = {
 'Parent': path,
-'Name': newName,
+'Name': new_name,
 'Error': err_msg,
 'Code': code
 }
@@ -1233,7 +1233,7 @@ def file_manager(trans_id):
 It gets unique transaction id from post request and
 rotate it into Filemanager class.
 """
-myFilemanager = Filemanager(trans_id)
+my_fm = Filemanager(trans_id)
 mode = ''
 kwargs = {}
 if req.method == 'POST':
@@ -1253,8 +1253,8 @@ def file_manager(trans_id):
 mode = req.args['mode']

 try:
-func = getattr(myFilemanager, mode)
+func = getattr(my_fm, mode)
 res = func(**kwargs)
 return make_json_response(data={'result': res, 'status': True})
 except Exception:
-return getattr(myFilemanager, mode)(**kwargs)
+return getattr(my_fm, mode)(**kwargs)
@@ -27,7 +27,7 @@ def get_all_themes():
 return all_themes


-def Themes(app):
+def themes(app):
 @app.context_processor
 def inject_theme_func():
 def get_theme_css():
@@ -314,9 +314,9 @@ def initialize_query_tool(trans_id, sgid, sid, did=None):
 if request.data:
 _ = request.data

-reqArgs = request.args
-if ('recreate' in reqArgs and
-reqArgs['recreate'] == '1'):
+req_args = request.args
+if ('recreate' in req_args and
+req_args['recreate'] == '1'):
 connect = False

 # Create asynchronous connection using random connection id.
@@ -1672,7 +1672,7 @@ def get_arguments_sqlite(sid, did, scid, func_id):
 """

 """Get the count of the existing data available in sqlite database"""
-DbgFuncArgsCount = DebuggerFunctionArguments.query.filter_by(
+dbg_func_args_count = DebuggerFunctionArguments.query.filter_by(
 server_id=sid,
 database_id=did,
 schema_id=scid,
@@ -1681,18 +1681,18 @@ def get_arguments_sqlite(sid, did, scid, func_id):

 args_data = []

-if DbgFuncArgsCount:
+if dbg_func_args_count:
 """Update the Debugger Function Arguments settings"""
-DbgFuncArgs = DebuggerFunctionArguments.query.filter_by(
+dbg_func_args = DebuggerFunctionArguments.query.filter_by(
 server_id=sid,
 database_id=did,
 schema_id=scid,
 function_id=func_id
 )

-args_list = DbgFuncArgs.all()
+args_list = dbg_func_args.all()

-for i in range(0, DbgFuncArgsCount):
+for i in range(0, dbg_func_args_count):
 info = {
 "arg_id": args_list[i].arg_id,
 "is_null": args_list[i].is_null,
@@ -1705,13 +1705,13 @@ def get_arguments_sqlite(sid, did, scid, func_id):
 # As we do have entry available for that function so we need to add
 # that entry
 return make_json_response(
-data={'result': args_data, 'args_count': DbgFuncArgsCount}
+data={'result': args_data, 'args_count': dbg_func_args_count}
 )
 else:
 # As we do not have any entry available for that function so we need
 # to add that entry
 return make_json_response(
-data={'result': 'result', 'args_count': DbgFuncArgsCount}
+data={'result': 'result', 'args_count': dbg_func_args_count}
 )

@@ -1743,7 +1743,7 @@ def set_arguments_sqlite(sid, did, scid, func_id):

 try:
 for i in range(0, len(data)):
-DbgFuncArgsExists = DebuggerFunctionArguments.query.filter_by(
+dbg_func_args_exists = DebuggerFunctionArguments.query.filter_by(
 server_id=data[i]['server_id'],
 database_id=data[i]['database_id'],
 schema_id=data[i]['schema_id'],
@@ -1771,8 +1771,8 @@ def set_arguments_sqlite(sid, did, scid, func_id):

 # Check if data is already available in database then update the
 # existing value otherwise add the new value
-if DbgFuncArgsExists:
-DbgFuncArgs = DebuggerFunctionArguments.query.filter_by(
+if dbg_func_args_exists:
+dbg_func_args = DebuggerFunctionArguments.query.filter_by(
 server_id=data[i]['server_id'],
 database_id=data[i]['database_id'],
 schema_id=data[i]['schema_id'],
@@ -1780,10 +1780,10 @@ def set_arguments_sqlite(sid, did, scid, func_id):
 arg_id=data[i]['arg_id']
 ).first()

-DbgFuncArgs.is_null = data[i]['is_null']
-DbgFuncArgs.is_expression = data[i]['is_expression']
-DbgFuncArgs.use_default = data[i]['use_default']
-DbgFuncArgs.value = array_string
+dbg_func_args.is_null = data[i]['is_null']
+dbg_func_args.is_expression = data[i]['is_expression']
+dbg_func_args.use_default = data[i]['use_default']
+dbg_func_args.value = array_string
 else:
 debugger_func_args = DebuggerFunctionArguments(
 server_id=data[i]['server_id'],
@@ -376,7 +376,7 @@ def msql(sid, did):
 acls['table']['acl'])

 # Pass database objects and get SQL for privileges
-SQL_data = ''
+sql_data = ''
 data_func = {'objects': data['objects'],
 'priv': data['priv']['function']}
 SQL = render_template(
@@ -384,7 +384,7 @@ def msql(sid, did):
 '/sql/grant_function.sql']),
 data=data_func, conn=conn)
 if SQL and SQL.strip('\n') != '':
-SQL_data += SQL
+sql_data += SQL

 data_seq = {'objects': data['objects'],
 'priv': data['priv']['sequence']}
@@ -393,7 +393,7 @@ def msql(sid, did):
 '/sql/grant_sequence.sql']),
 data=data_seq, conn=conn)
 if SQL and SQL.strip('\n') != '':
-SQL_data += SQL
+sql_data += SQL

 data_table = {'objects': data['objects'],
 'priv': data['priv']['table']}
@@ -401,9 +401,9 @@ def msql(sid, did):
 "/".join([server_prop['template_path'], '/sql/grant_table.sql']),
 data=data_table, conn=conn)
 if SQL and SQL.strip('\n') != '':
-SQL_data += SQL
+sql_data += SQL

-res = {'data': SQL_data}
+res = {'data': sql_data}

 return ajax_response(
 response=res,
@@ -464,7 +464,7 @@ def save(sid, did):

 # Pass database objects and get SQL for privileges
 # Pass database objects and get SQL for privileges
-SQL_data = ''
+sql_data = ''
 data_func = {'objects': data['objects'],
 'priv': data['priv']['function']}
 SQL = render_template(
@@ -472,7 +472,7 @@ def save(sid, did):
 '/sql/grant_function.sql']),
 data=data_func, conn=conn)
 if SQL and SQL.strip('\n') != '':
-SQL_data += SQL
+sql_data += SQL

 data_seq = {'objects': data['objects'],
 'priv': data['priv']['sequence']}
@@ -481,7 +481,7 @@ def save(sid, did):
 '/sql/grant_sequence.sql']),
 data=data_seq, conn=conn)
 if SQL and SQL.strip('\n') != '':
-SQL_data += SQL
+sql_data += SQL

 data_table = {'objects': data['objects'],
 'priv': data['priv']['table']}
@@ -489,9 +489,9 @@ def save(sid, did):
 "/".join([server_prop['template_path'], '/sql/grant_table.sql']),
 data=data_table, conn=conn)
 if SQL and SQL.strip('\n') != '':
-SQL_data += SQL
+sql_data += SQL

-status, res = conn.execute_dict(SQL_data)
+status, res = conn.execute_dict(sql_data)
 if not status:
 return internal_server_error(errormsg=res)

@@ -86,7 +86,7 @@ class SearchObjectsHelperTest(BaseTestGenerator):
 for data in blueprints:
 if node_type == data['node_type']:
 blueprint = MagicMock(
-BackendSupported=MagicMock(
+backend_supported=MagicMock(
 return_value=data['backend_supported']),
 collection_label=data['coll_label'],
 show_node=data['backend_supported'],
@@ -71,8 +71,8 @@ class SearchObjectsHelper:
 if blueprint is None:
 continue

-if blueprint.BackendSupported(self.manager, is_catalog=False,
+if blueprint.backend_supported(self.manager, is_catalog=False,
 did=self.did) or skip_check:
 if node_type in ['edbfunc', 'edbproc']:
 return_types[node_type] =\
 gettext('Package {0}').format(