Fixed cognitive complexity issues reported by SonarQube.

Nikhil Mohite
2020-08-25 18:13:01 +05:30
committed by Akshay Joshi
parent e582ffca38
commit d2577e32e6
3 changed files with 405 additions and 200 deletions

View File

@@ -839,9 +839,34 @@ class DatabaseView(PGChildNodeView):
)
)
@check_precondition(action="drop")
def delete(self, gid, sid, did=None):
"""Delete the database."""
def _release_conn_before_delete(self, sid, did):
"""
Check connection and release it before deleting database.
:param sid: Server Id.
:param did: Database Id.
:return: Tuple of (error flag, error response).
"""
if self.conn.connected():
# Release the connection if it is connected
from pgadmin.utils.driver import get_driver
manager = \
get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
manager.connection(did=did, auto_reconnect=True)
status = manager.release(did=did)
if not status:
return True, unauthorized(
_("Database could not be deleted."))
return False, ''
@staticmethod
def _get_req_data(did):
"""
Get data from request.
:param did: Database Id.
:return: Data retrieved from the request.
"""
if did is None:
data = request.form if request.form else json.loads(
@@ -850,13 +875,21 @@ class DatabaseView(PGChildNodeView):
else:
data = {'ids': [did]}
return data
@check_precondition(action="drop")
def delete(self, gid, sid, did=None):
"""Delete the database."""
data = DatabaseView._get_req_data(did)
for did in data['ids']:
default_conn = self.manager.connection()
SQL = render_template(
sql = render_template(
"/".join([self.template_path, self._DELETE_SQL]),
did=did, conn=self.conn
)
status, res = default_conn.execute_scalar(SQL)
status, res = default_conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
@@ -872,24 +905,16 @@ class DatabaseView(PGChildNodeView):
)
)
else:
if self.conn.connected():
# Release the connection if it is connected
from pgadmin.utils.driver import get_driver
manager = \
get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
manager.connection(did=did, auto_reconnect=True)
status = manager.release(did=did)
is_error, errmsg = self._release_conn_before_delete(sid, did)
if is_error:
return errmsg
if not status:
return unauthorized(
_("Database could not be deleted."))
SQL = render_template(
sql = render_template(
"/".join([self.template_path, self._DELETE_SQL]),
datname=res, conn=self.conn
)
status, msg = default_conn.execute_scalar(SQL)
status, msg = default_conn.execute_scalar(sql)
if not status:
# reconnect if database drop failed.
conn = self.manager.connection(did=did,
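The extracted _release_conn_before_delete helper returns an (is_error, errmsg) pair so that delete() can bail out early instead of adding another level of nesting. A minimal standalone sketch of that early-return convention, using hypothetical names rather than the pgAdmin code itself:

def _check_connection(connected):
    # Report a problem as (is_error, response); the caller decides to abort.
    if not connected:
        return True, "Database could not be deleted."
    return False, ''

def delete(connected):
    is_error, errmsg = _check_connection(connected)
    if is_error:
        return errmsg  # early exit keeps the main method flat
    return "dropped"

print(delete(False))  # Database could not be deleted.
print(delete(True))   # dropped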

View File

@@ -178,17 +178,10 @@ def filename_with_file_manager_path(_file):
return fs_short_path(_file)
@blueprint.route('/job/<int:sid>', methods=['POST'], endpoint='create_job')
@login_required
def create_restore_job(sid):
def _get_create_req_data():
"""
Args:
sid: Server ID
Creates a new job for restore task
Returns:
None
Get data from the request to create a restore job.
:return: Data from the request if no error occurred.
"""
if request.form:
data = json.loads(request.form['data'], encoding='utf-8')
@@ -198,15 +191,24 @@ def create_restore_job(sid):
try:
_file = filename_with_file_manager_path(data['file'])
except Exception as e:
return bad_request(errormsg=str(e))
return True, bad_request(errormsg=str(e)), data
if _file is None:
return make_json_response(
return True, make_json_response(
status=410,
success=0,
errormsg=_("File could not be found.")
)
), data, _file
return False, '', data, _file
def _connect_server(sid):
"""
Get the server object and try to connect to it.
:param sid: Server ID.
:return: Connection data if no error occurred.
"""
# Fetch the server details like hostname, port, roles etc
server = Server.query.filter_by(
id=sid
@@ -227,67 +229,107 @@ def create_restore_job(sid):
connected = conn.connected()
if not connected:
return make_json_response(
return True, make_json_response(
success=0,
errormsg=_("Please connect to the server first.")
)
), driver, manager, conn, connected
utility = manager.utility('restore')
ret_val = does_utility_exist(utility)
if ret_val:
return make_json_response(
success=0,
errormsg=ret_val
)
return False, '', driver, manager, conn, connected, server
def set_param(key, param, data, args):
"""
Check and add a parameter to the args list.
:param key: Key.
:param param: Parameter to be added to the args list.
:param data: Data.
:param args: args list.
:return: True if the key is present in data, otherwise False.
"""
if key in data and data[key]:
args.append(param)
return True
return False
def set_value(key, param, data, args, default_value=None):
"""
Add the key's value to the args list; if the key is not present in data, fall back to the default value.
:param key: Key.
:param param: Parameter to be added to the args list.
:param data: Data.
:param args: args list.
:param default_value: default value flag.
:return:
"""
if key in data and data[key] is not None and data[key] != '':
args.append(param)
args.append(data[key])
elif default_value is not None:
args.append(param)
args.append(default_value)
def _set_value_with_schema(data, key, args, param, driver, conn):
"""
Set value if with_schema flag is true.
:param data: Data.
:param key: Key.
:param args: args list.
:param param: Parameter to be added to the args list.
:param driver: Driver.
:param conn: connection.
:return:
"""
if isinstance(data[key], list):
s, t = data[key]
args.extend([
param,
driver.qtIdent(
conn, s
) + '.' + driver.qtIdent(conn, t)
])
else:
for s, o in data[key]:
args.extend([
param,
driver.qtIdent(
conn, s
) + '.' + driver.qtIdent(conn, o)
])
def set_multiple(key, param, data, args, driver, conn, with_schema=True):
if key in data and \
len(data[key]) > 0:
if with_schema:
# TODO:// This is temporary
# Once object tree is implemented then we will use
# list of tuples 'else' part
_set_value_with_schema(data, key, args, param, driver, conn)
else:
for o in data[key]:
args.extend([param, o])
return True
return False
def _set_args_param_values(data, manager, server, driver, conn, _file):
"""
Add arguments to the args list and return it.
:param data: Data.
:param manager: Manager.
:param server: Server.
:param driver: Driver.
:param conn: Connection.
:param _file: File.
:return: args list.
"""
args = []
if 'list' in data:
args.append('--list')
else:
def set_param(key, param):
if key in data and data[key]:
args.append(param)
return True
return False
def set_value(key, param, default_value=None):
if key in data and data[key] is not None and data[key] != '':
args.append(param)
args.append(data[key])
elif default_value is not None:
args.append(param)
args.append(default_value)
def set_multiple(key, param, with_schema=True):
if key in data and \
len(data[key]) > 0:
if with_schema:
# TODO:// This is temporary
# Once object tree is implemented then we will use
# list of tuples 'else' part
if isinstance(data[key], list):
s, t = data[key]
args.extend([
param,
driver.qtIdent(
conn, s
) + '.' + driver.qtIdent(conn, t)
])
else:
for s, o in data[key]:
args.extend([
param,
driver.qtIdent(
conn, s
) + '.' + driver.qtIdent(conn, o)
])
else:
for o in data[key]:
args.extend([param, o])
return True
return False
args.extend([
'--host',
manager.local_bind_host if manager.use_ssh_tunnel else server.host,
@@ -297,46 +339,84 @@ def create_restore_job(sid):
'--username', server.username, '--no-password'
])
set_value('role', '--role')
set_value('database', '--dbname')
set_value('role', '--role', data, args)
set_value('database', '--dbname', data, args)
if data['format'] == 'directory':
args.extend(['--format=d'])
set_param('pre_data', '--section=pre-data')
set_param('data', '--section=data')
set_param('post_data', '--section=post-data')
set_param('pre_data', '--section=pre-data', data, args)
set_param('data', '--section=data', data, args)
set_param('post_data', '--section=post-data', data, args)
if not set_param('only_data', '--data-only'):
set_param('dns_owner', '--no-owner')
set_param('dns_privilege', '--no-privileges')
set_param('dns_tablespace', '--no-tablespaces')
if not set_param('only_data', '--data-only', data, args):
set_param('dns_owner', '--no-owner', data, args)
set_param('dns_privilege', '--no-privileges', data, args)
set_param('dns_tablespace', '--no-tablespaces', data, args)
if not set_param('only_schema', '--schema-only'):
set_param('disable_trigger', '--disable-triggers')
if not set_param('only_schema', '--schema-only', data, args):
set_param('disable_trigger', '--disable-triggers', data, args)
set_param('include_create_database', '--create')
set_param('clean', '--clean')
set_param('single_transaction', '--single-transaction')
set_param('no_data_fail_table', '--no-data-for-failed-tables')
set_param('use_set_session_auth', '--use-set-session-authorization')
set_param('exit_on_error', '--exit-on-error')
set_param('include_create_database', '--create', data, args)
set_param('clean', '--clean', data, args)
set_param('single_transaction', '--single-transaction', data, args)
set_param('no_data_fail_table', '--no-data-for-failed-tables', data,
args)
set_param('use_set_session_auth', '--use-set-session-authorization',
data, args)
set_param('exit_on_error', '--exit-on-error', data, args)
if manager.version >= 110000:
set_param('no_comments', '--no-comments')
set_param('no_comments', '--no-comments', data, args)
set_value('no_of_jobs', '--jobs')
set_param('verbose', '--verbose')
set_value('no_of_jobs', '--jobs', data, args)
set_param('verbose', '--verbose', data, args)
set_multiple('schemas', '--schema', False)
set_multiple('tables', '--table', False)
set_multiple('functions', '--function', False)
set_multiple('triggers', '--trigger', False)
set_multiple('trigger_funcs', '--function', False)
set_multiple('indexes', '--index', False)
set_multiple('schemas', '--schema', data, args, driver, conn, False)
set_multiple('tables', '--table', data, args, driver, conn, False)
set_multiple('functions', '--function', data, args, driver, conn,
False)
set_multiple('triggers', '--trigger', data, args, driver, conn, False)
set_multiple('trigger_funcs', '--function', data, args, driver, conn,
False)
set_multiple('indexes', '--index', data, args, driver, conn, False)
args.append(fs_short_path(_file))
return args
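Because set_param and set_value now receive data and args explicitly instead of closing over create_restore_job()'s locals, they can live at module level and be exercised on their own. A small self-contained usage sketch with hypothetical request data (the helper bodies mirror the hunk above):

def set_param(key, param, data, args):
    # Append a bare flag when the key is present and truthy.
    if key in data and data[key]:
        args.append(param)
        return True
    return False

def set_value(key, param, data, args, default_value=None):
    # Append a flag with its value, or fall back to a default if one is given.
    if key in data and data[key] is not None and data[key] != '':
        args.append(param)
        args.append(data[key])
    elif default_value is not None:
        args.append(param)
        args.append(default_value)

data = {'clean': True, 'no_of_jobs': 4, 'verbose': False}  # hypothetical input
args = []
set_param('clean', '--clean', data, args)
set_value('no_of_jobs', '--jobs', data, args)
set_param('verbose', '--verbose', data, args)  # False, so nothing is added
print(args)  # ['--clean', '--jobs', 4]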
@blueprint.route('/job/<int:sid>', methods=['POST'], endpoint='create_job')
@login_required
def create_restore_job(sid):
"""
Args:
sid: Server ID
Creates a new job for restore task
Returns:
None
"""
is_error, errmsg, data, _file = _get_create_req_data()
if is_error:
return errmsg
is_error, errmsg, driver, manager, conn, \
connected, server = _connect_server(sid)
if is_error:
return errmsg
utility = manager.utility('restore')
ret_val = does_utility_exist(utility)
if ret_val:
return make_json_response(
success=0,
errormsg=ret_val
)
args = _set_args_param_values(data, manager, server, driver, conn, _file)
try:
p = BatchProcess(
desc=RestoreMessage(

View File

@@ -114,19 +114,10 @@ class SQLAutoComplete(object):
schema_names = []
if self.conn.connected():
# Fetch the search path
query = render_template(
"/".join([self.sql_path, 'schema.sql']), search_path=True)
status, res = self.conn.execute_dict(query)
if status:
for record in res['rows']:
self.search_path.append(record['schema'])
self._set_search_path()
# Fetch the schema names
query = render_template("/".join([self.sql_path, 'schema.sql']))
status, res = self.conn.execute_dict(query)
if status:
for record in res['rows']:
schema_names.append(record['schema'])
self._fetch_schema_name(schema_names)
pref = Preferences.module('sqleditor')
keywords_in_uppercase = \
@@ -173,6 +164,21 @@ class SQLAutoComplete(object):
self.qualify_columns = 'if_more_than_one_table'
self.asterisk_column_order = 'table_order'
def _set_search_path(self):
query = render_template(
"/".join([self.sql_path, 'schema.sql']), search_path=True)
status, res = self.conn.execute_dict(query)
if status:
for record in res['rows']:
self.search_path.append(record['schema'])
def _fetch_schema_name(self, schema_names):
query = render_template("/".join([self.sql_path, 'schema.sql']))
status, res = self.conn.execute_dict(query)
if status:
for record in res['rows']:
schema_names.append(record['schema'])
def escape_name(self, name):
if name and (
(not self.name_pattern.match(name)) or
@@ -622,6 +628,25 @@ class SQLAutoComplete(object):
aliases = (tbl + str(i) for i in count(2))
return next(a for a in aliases if normalize_ref(a) not in tbls)
def _check_for_aliases(self, left, refs, rtbl, suggestion, right):
"""
Check whether aliases should be generated and return the join string.
:param left:
:param refs:
:param rtbl:
:param suggestion:
:param right:
:return: The join string.
"""
if self.generate_aliases or normalize_ref(left.tbl) in refs:
lref = self.alias(left.tbl, suggestion.table_refs)
join = '{0} {4} ON {4}.{1} = {2}.{3}'.format(
left.tbl, left.col, rtbl.ref, right.col, lref)
else:
join = '{0} ON {0}.{1} = {2}.{3}'.format(
left.tbl, left.col, rtbl.ref, right.col)
return join
def get_join_matches(self, suggestion, word_before_cursor):
tbls = suggestion.table_refs
cols = self.populate_scoped_cols(tbls)
@@ -644,13 +669,8 @@ class SQLAutoComplete(object):
left = child if parent == right else parent
if suggestion.schema and left.schema != suggestion.schema:
continue
if self.generate_aliases or normalize_ref(left.tbl) in refs:
lref = self.alias(left.tbl, suggestion.table_refs)
join = '{0} {4} ON {4}.{1} = {2}.{3}'.format(
left.tbl, left.col, rtbl.ref, right.col, lref)
else:
join = '{0} ON {0}.{1} = {2}.{3}'.format(
left.tbl, left.col, rtbl.ref, right.col)
join = self._check_for_aliases(left, refs, rtbl, suggestion, right)
alias = generate_alias(left.tbl)
synonyms = [join, '{0} ON {0}.{1} = {2}.{3}'.format(
alias, left.col, rtbl.ref, right.col)]
@@ -668,6 +688,34 @@ class SQLAutoComplete(object):
return self.find_matches(word_before_cursor, joins,
mode='strict', meta='join')
def list_dict(self, pairs): # Turns [(a, b), (a, c)] into {a: [b, c]}
d = defaultdict(list)
for pair in pairs:
d[pair[0]].append(pair[1])
return d
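list_dict groups duplicate keys into lists, which is how the completer maps column identities to every table that exposes them. A quick standalone check with hypothetical pairs:

from collections import defaultdict

def list_dict(pairs):  # Turns [(a, b), (a, c)] into {a: [b, c]}
    d = defaultdict(list)
    for pair in pairs:
        d[pair[0]].append(pair[1])
    return d

grouped = list_dict([('a', 'b'), ('a', 'c'), ('x', 'y')])
print(dict(grouped))  # {'a': ['b', 'c'], 'x': ['y']}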
def add_cond(self, lcol, rcol, rref, prio, meta, **kwargs):
"""
Add a condition to the join conditions list.
:param lcol:
:param rcol:
:param rref:
:param prio:
:param meta:
:param kwargs:
:return:
"""
suggestion = kwargs['suggestion']
found_conds = kwargs['found_conds']
ltbl = kwargs['ltbl']
ref_prio = kwargs['ref_prio']
conds = kwargs['conds']
prefix = '' if suggestion.parent else ltbl.ref + '.'
cond = prefix + lcol + ' = ' + rref + '.' + rcol
if cond not in found_conds:
found_conds.add(cond)
conds.append(Candidate(cond, prio + ref_prio[rref], meta))
def get_join_condition_matches(self, suggestion, word_before_cursor):
col = namedtuple('col', 'schema tbl col')
tbls = self.populate_scoped_cols(suggestion.table_refs).items
@@ -679,24 +727,11 @@ class SQLAutoComplete(object):
return []
conds, found_conds = [], set()
def add_cond(lcol, rcol, rref, prio, meta):
prefix = '' if suggestion.parent else ltbl.ref + '.'
cond = prefix + lcol + ' = ' + rref + '.' + rcol
if cond not in found_conds:
found_conds.add(cond)
conds.append(Candidate(cond, prio + ref_prio[rref], meta))
def list_dict(pairs): # Turns [(a, b), (a, c)] into {a: [b, c]}
d = defaultdict(list)
for pair in pairs:
d[pair[0]].append(pair[1])
return d
# Tables that are closer to the cursor get higher prio
ref_prio = dict((tbl.ref, num)
for num, tbl in enumerate(suggestion.table_refs))
# Map (schema, table, col) to tables
coldict = list_dict(
coldict = self.list_dict(
((t.schema, t.name, c.name), t) for t, c in cols if t.ref != lref
)
# For each fk from the left table, generate a join condition if
@@ -707,17 +742,35 @@ class SQLAutoComplete(object):
child = col(fk.childschema, fk.childtable, fk.childcolumn)
par = col(fk.parentschema, fk.parenttable, fk.parentcolumn)
left, right = (child, par) if left == child else (par, child)
for rtbl in coldict[right]:
add_cond(left.col, right.col, rtbl.ref, 2000, 'fk join')
kwargs = {
"suggestion": suggestion,
"found_conds": found_conds,
"ltbl": ltbl,
"conds": conds,
"ref_prio": ref_prio
}
self.add_cond(left.col, right.col, rtbl.ref, 2000, 'fk join',
**kwargs)
# For name matching, use a {(colname, coltype): TableReference} dict
coltyp = namedtuple('coltyp', 'name datatype')
col_table = list_dict((coltyp(c.name, c.datatype), t) for t, c in cols)
col_table = self.list_dict(
(coltyp(c.name, c.datatype), t) for t, c in cols)
# Find all name-match join conditions
for c in (coltyp(c.name, c.datatype) for c in lcols):
for rtbl in (t for t in col_table[c] if t.ref != ltbl.ref):
kwargs = {
"suggestion": suggestion,
"found_conds": found_conds,
"ltbl": ltbl,
"conds": conds,
"ref_prio": ref_prio
}
prio = 1000 if c.datatype in (
'integer', 'bigint', 'smallint') else 0
add_cond(c.name, c.name, rtbl.ref, prio, 'name join')
self.add_cond(c.name, c.name, rtbl.ref, prio, 'name join',
**kwargs)
return self.find_matches(word_before_cursor, conds,
mode='strict', meta='join')
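Since add_cond is now a method rather than a closure, the loop context (suggestion, found_conds, ltbl, conds, ref_prio) is bundled into a kwargs dict and expanded at each call site. A reduced sketch of that pattern with hypothetical names, showing how the shared set still deduplicates conditions:

def add_cond(cond, prio, **kwargs):
    # Pull the shared state out of the keyword bundle.
    found_conds = kwargs['found_conds']
    conds = kwargs['conds']
    if cond not in found_conds:  # skip duplicates
        found_conds.add(cond)
        conds.append((cond, prio))

context = {'found_conds': set(), 'conds': []}
for cond, prio in [('a.id = b.id', 2000), ('a.id = b.id', 2000), ('a.x = b.x', 0)]:
    add_cond(cond, prio, **context)

print(context['conds'])  # [('a.id = b.id', 2000), ('a.x = b.x', 0)]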
@@ -951,6 +1004,52 @@ class SQLAutoComplete(object):
Datatype: get_datatype_matches,
}
def addcols(self, schema, rel, alias, reltype, cols, columns):
"""
Add columns to the schema column list.
:param schema: Schema for reference.
:param rel:
:param alias:
:param reltype:
:param cols:
:param columns:
:return:
"""
tbl = TableReference(schema, rel, alias, reltype == 'functions')
if tbl not in columns:
columns[tbl] = []
columns[tbl].extend(cols)
def _get_schema_columns(self, schemas, tbl, meta, columns):
"""
Check each schema and add the matching relation's columns for the given table.
:param schemas: Schema
:param tbl:
:param meta:
:param columns: column list
:return:
"""
for schema in schemas:
relname = self.escape_name(tbl.name)
schema = self.escape_name(schema)
if tbl.is_function:
# Return column names from a set-returning function
# Get an array of FunctionMetadata objects
functions = meta['functions'].get(schema, {}).get(relname)
for func in (functions or []):
# func is a FunctionMetadata object
cols = func.fields()
self.addcols(schema, relname, tbl.alias, 'functions', cols,
columns)
else:
for reltype in ('tables', 'views'):
cols = meta[reltype].get(schema, {}).get(relname)
if cols:
cols = cols.values()
self.addcols(schema, relname, tbl.alias, reltype, cols,
columns)
break
def populate_scoped_cols(self, scoped_tbls, local_tbls=()):
"""Find all columns in a set of scoped_tables.
@@ -963,37 +1062,14 @@ class SQLAutoComplete(object):
columns = OrderedDict()
meta = self.dbmetadata
def addcols(schema, rel, alias, reltype, cols):
tbl = TableReference(schema, rel, alias, reltype == 'functions')
if tbl not in columns:
columns[tbl] = []
columns[tbl].extend(cols)
for tbl in scoped_tbls:
# Local tables should shadow database tables
if tbl.schema is None and normalize_ref(tbl.name) in ctes:
cols = ctes[normalize_ref(tbl.name)]
addcols(None, tbl.name, 'CTE', tbl.alias, cols)
self.addcols(None, tbl.name, 'CTE', tbl.alias, cols, columns)
continue
schemas = [tbl.schema] if tbl.schema else self.search_path
for schema in schemas:
relname = self.escape_name(tbl.name)
schema = self.escape_name(schema)
if tbl.is_function:
# Return column names from a set-returning function
# Get an array of FunctionMetadata objects
functions = meta['functions'].get(schema, {}).get(relname)
for func in (functions or []):
# func is a FunctionMetadata object
cols = func.fields()
addcols(schema, relname, tbl.alias, 'functions', cols)
else:
for reltype in ('tables', 'views'):
cols = meta[reltype].get(schema, {}).get(relname)
if cols:
cols = cols.values()
addcols(schema, relname, tbl.alias, reltype, cols)
break
self._get_schema_columns(schemas, tbl, meta, columns)
return columns
@@ -1057,14 +1133,16 @@ class SQLAutoComplete(object):
if filter_func(meta)
]
def fetch_schema_objects(self, schema, obj_type):
def _get_schema_obj_query(self, schema, obj_type):
"""
This function is used to fetch schema objects like tables, views, etc..
:return:
Get the query according to the object type, e.g. tables, views, etc.
:param schema: schema flag to include the schema in the in_clause.
:param obj_type: object type.
:return: Query for the given object type, and the in_clause
(set when the schema flag is true).
"""
in_clause = ''
query = ''
data = []
if schema:
in_clause = '\'' + schema + '\''
@@ -1087,6 +1165,16 @@ class SQLAutoComplete(object):
query = render_template("/".join([self.sql_path, 'datatypes.sql']),
schema_names=in_clause)
return query, in_clause
def fetch_schema_objects(self, schema, obj_type):
"""
This function is used to fetch schema objects like tables, views, etc.
:return:
"""
data = []
query, in_clause = self._get_schema_obj_query(schema, obj_type)
if self.conn.connected():
status, res = self.conn.execute_dict(query)
if status:
@@ -1107,15 +1195,13 @@ class SQLAutoComplete(object):
elif obj_type == 'datatypes' and len(data) > 0:
self.extend_datatypes(data)
def fetch_functions(self, schema):
def _get_function_sql(self, schema):
"""
This function is used to fetch the list of functions.
:param schema:
:return:
Check for schema inclusion and fetch the SQL for functions.
:param schema: include schema flag.
:return: SQL query for functions, and the in_clause value.
"""
in_clause = ''
data = []
if schema:
in_clause = '\'' + schema + '\''
else:
@@ -1128,30 +1214,44 @@ class SQLAutoComplete(object):
query = render_template("/".join([self.sql_path, 'functions.sql']),
schema_names=in_clause)
return query, in_clause
def _get_function_meta_data(self, res, data):
for row in res['rows']:
data.append(FunctionMetadata(
row['schema_name'],
row['func_name'],
row['arg_names'].strip('{}').split(',')
if row['arg_names'] is not None
else row['arg_names'],
row['arg_types'].strip('{}').split(',')
if row['arg_types'] is not None
else row['arg_types'],
row['arg_modes'].strip('{}').split(',')
if row['arg_modes'] is not None
else row['arg_modes'],
row['return_type'],
row['is_aggregate'],
row['is_window'],
row['is_set_returning'],
row['arg_defaults'].strip('{}').split(',')
if row['arg_defaults'] is not None
else row['arg_defaults']
))
def fetch_functions(self, schema):
"""
This function is used to fetch the list of functions.
:param schema:
:return:
"""
data = []
query, in_clause = self._get_function_sql(schema)
if self.conn.connected():
status, res = self.conn.execute_dict(query)
if status:
for row in res['rows']:
data.append(FunctionMetadata(
row['schema_name'],
row['func_name'],
row['arg_names'].strip('{}').split(',')
if row['arg_names'] is not None
else row['arg_names'],
row['arg_types'].strip('{}').split(',')
if row['arg_types'] is not None
else row['arg_types'],
row['arg_modes'].strip('{}').split(',')
if row['arg_modes'] is not None
else row['arg_modes'],
row['return_type'],
row['is_aggregate'],
row['is_window'],
row['is_set_returning'],
row['arg_defaults'].strip('{}').split(',')
if row['arg_defaults'] is not None
else row['arg_defaults']
))
self._get_function_meta_data(res, data)
if len(data) > 0:
self.extend_functions(data)
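The extracted _get_function_meta_data repeatedly applies one idiom: PostgreSQL array columns such as arg_names arrive as text like '{a,b,c}' and are split into Python lists, with None passed through unchanged. A tiny standalone version of that idiom (a hypothetical helper, not part of the commit):

def parse_pg_array(value):
    # '{x,y,z}' -> ['x', 'y', 'z']; None stays None.
    return value.strip('{}').split(',') if value is not None else value

print(parse_pg_array('{x,y,z}'))  # ['x', 'y', 'z']
print(parse_pg_array(None))       # None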