Fixed SonarQube code smell: "Replace the unused local variable with '_'".

Akshay Joshi
2024-01-24 18:33:43 +05:30
parent 47aa070cc3
commit 0e0cbc40b8
76 changed files with 4983 additions and 4998 deletions
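
The change applied across all of these files is the standard Python idiom for the SonarQube "unused local variable" code smell: when a call returns a tuple and only some elements are needed, the unused slots are bound to '_'. A minimal before/after sketch of the pattern, using a hypothetical lookup() helper rather than any pgAdmin code:

    def lookup(key):
        """Return a (status, value) pair, the shape many pgAdmin helpers use."""
        return True, key.upper()

    def before(key):
        # Flagged by SonarQube: 'status' is assigned but never used.
        status, value = lookup(key)
        return value

    def after(key):
        # Fixed: the unused slot is bound to '_', which linters treat as an
        # intentional throw-away name.
        _, value = lookup(key)
        return value

    print(before('a'), after('b'))  # -> A B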

View File

@@ -1,5 +1,9 @@
 checksumBehavior: update
+compressionLevel: mixed
+enableGlobalCache: false
 logFilters:
   - code: YN0013
     level: discard

View File

@@ -26,7 +26,7 @@ def load_providers():
if os.path.isfile(filename): if os.path.isfile(filename):
basename = os.path.basename(filename) basename = os.path.basename(filename)
base, extension = os.path.splitext(basename) _, extension = os.path.splitext(basename)
if extension == ".py" and not basename.startswith("_"): if extension == ".py" and not basename.startswith("_"):
module = __import__("providers." + basename[:-3], module = __import__("providers." + basename[:-3],
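
For reference, a standalone sketch of the os.path.splitext() change above: only the extension is needed, so the root of the file name is discarded with '_' (the path below is made up):

    import os

    filename = '/tmp/providers/azure.py'  # example path, not from the commit
    basename = os.path.basename(filename)

    # Only the extension matters here; the root of the name is unused.
    _, extension = os.path.splitext(basename)

    if extension == '.py' and not basename.startswith('_'):
        print('would import module:', basename[:-3])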

View File

@@ -197,7 +197,7 @@ class AzureProvider(AbsProvider):
""" Create/cache/return an Azure client object """ """ Create/cache/return an Azure client object """
# Acquire a credential object using CLI-based authentication. # Acquire a credential object using CLI-based authentication.
if self._credentials is None: if self._credentials is None:
status, self._credentials = \ _, self._credentials = \
self._get_azure_credentials() self._get_azure_credentials()
if type in self._clients: if type in self._clients:

View File

@@ -227,7 +227,7 @@ class EmailAuthentication(BaseMFAuth):
def _registration_view_after_code_sent(self, _form_data): def _registration_view_after_code_sent(self, _form_data):
session['mfa_email_id'] = _form_data.get('send_to', None) session['mfa_email_id'] = _form_data.get('send_to', None)
success, http_code, message = _send_code_to_email( success, _, message = _send_code_to_email(
session['mfa_email_id'] session['mfa_email_id']
) )
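
_send_code_to_email() above returns a (success, http_code, message) style tuple and only the HTTP code is unused, so only that slot becomes '_'. A self-contained sketch of the same shape, with an invented send_code() standing in for the real helper:

    def send_code(address):
        """Hypothetical stand-in returning (success, http_code, message)."""
        return True, 200, 'code sent to {}'.format(address)

    success, _, message = send_code('user@example.com')
    if success:
        print(message)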

View File

@@ -566,7 +566,7 @@ class CastView(PGChildNodeView, SchemaDiffObjectCompare):
:return: :return:
""" """
data = request.args data = request.args
sql, name = self.get_sql(gid, sid, did, data, cid) sql, _ = self.get_sql(gid, sid, did, data, cid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -809,8 +809,8 @@ class CastView(PGChildNodeView, SchemaDiffObjectCompare):
drop_sql = kwargs.get('drop_sql', False) drop_sql = kwargs.get('drop_sql', False)
if data: if data:
sql, name = self.get_sql(gid=gid, sid=sid, did=did, data=data, sql, _ = self.get_sql(gid=gid, sid=sid, did=did, data=data,
cid=oid) cid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -415,7 +415,7 @@ class ExtensionView(PGChildNodeView, SchemaDiffObjectCompare):
data[k] = v data[k] = v
try: try:
SQL, name = self.getSQL(gid, sid, data, did, eid) SQL, _ = self.getSQL(gid, sid, data, did, eid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -601,8 +601,8 @@ class ExtensionView(PGChildNodeView, SchemaDiffObjectCompare):
drop_sql = kwargs.get('drop_sql', False) drop_sql = kwargs.get('drop_sql', False)
if data: if data:
sql, name = self.getSQL(gid=gid, sid=sid, did=did, data=data, sql, _ = self.getSQL(gid=gid, sid=sid, did=did, data=data,
eid=oid) eid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -634,7 +634,7 @@ class ForeignDataWrapperView(PGChildNodeView, SchemaDiffObjectCompare):
except ValueError: except ValueError:
data[k] = v data[k] = v
try: try:
sql, name = self.get_sql(gid, sid, data, did, fid) sql, _ = self.get_sql(gid, sid, data, did, fid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -1009,8 +1009,8 @@ class ForeignDataWrapperView(PGChildNodeView, SchemaDiffObjectCompare):
drop_sql = kwargs.get('drop_sql', False) drop_sql = kwargs.get('drop_sql', False)
if data: if data:
sql, name = self.get_sql(gid=gid, sid=sid, did=did, data=data, sql, _ = self.get_sql(gid=gid, sid=sid, did=did, data=data,
fid=oid) fid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -631,7 +631,7 @@ class ForeignServerView(PGChildNodeView, SchemaDiffObjectCompare):
except ValueError: except ValueError:
data[k] = v data[k] = v
try: try:
sql, name = self.get_sql(gid, sid, data, did, fid, fsid) sql, _ = self.get_sql(gid, sid, data, did, fid, fsid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -967,8 +967,8 @@ class ForeignServerView(PGChildNodeView, SchemaDiffObjectCompare):
drop_sql = kwargs.get('drop_sql', False) drop_sql = kwargs.get('drop_sql', False)
if data: if data:
sql, name = self.get_sql(gid=gid, sid=sid, did=did, data=data, sql, _ = self.get_sql(gid=gid, sid=sid, did=did, data=data,
fid=fdw_id, fsid=oid) fid=fdw_id, fsid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, fid=fdw_id, sql = self.delete(gid=gid, sid=sid, did=did, fid=fdw_id,

View File

@@ -656,7 +656,7 @@ class UserMappingView(PGChildNodeView, SchemaDiffObjectCompare):
except ValueError: except ValueError:
data[k] = v data[k] = v
try: try:
sql, name = self.get_sql(data=data, fsid=fsid, umid=umid) sql, _ = self.get_sql(data=data, fsid=fsid, umid=umid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -941,7 +941,7 @@ class UserMappingView(PGChildNodeView, SchemaDiffObjectCompare):
drop_sql = kwargs.get('drop_sql', False) drop_sql = kwargs.get('drop_sql', False)
if data: if data:
sql, name = self.get_sql(data=data, fsid=fsid, umid=oid) sql, _ = self.get_sql(data=data, fsid=fsid, umid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, fid=fid, sql = self.delete(gid=gid, sid=sid, did=did, fid=fid,

View File

@@ -595,7 +595,7 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
except ValueError: except ValueError:
data[k] = v data[k] = v
try: try:
sql, name = self.get_sql(data, lid) sql, _ = self.get_sql(data, lid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -868,7 +868,7 @@ class LanguageView(PGChildNodeView, SchemaDiffObjectCompare):
drop_sql = kwargs.get('drop_sql', False) drop_sql = kwargs.get('drop_sql', False)
if data: if data:
sql, name = self.get_sql(data=data, lid=oid) sql, _ = self.get_sql(data=data, lid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -594,7 +594,7 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
data[k] = v data[k] = v
try: try:
sql, name = self.get_sql(data, pbid) sql, _ = self.get_sql(data, pbid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -701,11 +701,9 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
 if 'pubschema' in data:
     for schema in data['pubschema']:
-        if 'pubschema' in old_data and \
-                schema not in old_data['pubschema']:
-            add_schema_data.append(schema)
-            add_schema = True
-        elif 'pubschema' not in old_data:
+        if (('pubschema' in old_data and
+             schema not in old_data['pubschema']) or
+                ('pubschema' not in old_data)):
             add_schema_data.append(schema)
             add_schema = True
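
This hunk is not a '_' rename; it folds two branches with identical bodies into a single condition. A reduced, runnable sketch of that transformation with made-up data:

    old_data = {'pubschema': ['public']}
    data = {'pubschema': ['public', 'sales']}

    add_schema_data = []
    add_schema = False

    for schema in data['pubschema']:
        # One condition now covers both "schema is new" and
        # "old_data has no pubschema at all".
        if (('pubschema' in old_data and
             schema not in old_data['pubschema']) or
                ('pubschema' not in old_data)):
            add_schema_data.append(schema)
            add_schema = True

    print(add_schema, add_schema_data)  # -> True ['sales']
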
@@ -900,7 +898,7 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
) )
pub_table = [] pub_table = []
status, table_res = self.conn.execute_dict(table_sql) _, table_res = self.conn.execute_dict(table_sql)
for table in table_res['rows']: for table in table_res['rows']:
if 'columns' in table and 'where' in table: if 'columns' in table and 'where' in table:
@@ -1111,7 +1109,7 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
drop_sql = kwargs.get('drop_sql', False) drop_sql = kwargs.get('drop_sql', False)
if data: if data:
sql, name = self.get_sql(data=data, pbid=oid) sql, _ = self.get_sql(data=data, pbid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -865,7 +865,7 @@ It may have been removed by another user.
data[k] = v data[k] = v
try: try:
SQL, name = self.get_sql(gid, sid, data, scid) SQL, _ = self.get_sql(gid, sid, data, scid)
if SQL and SQL.strip('\n') and SQL.strip(' '): if SQL and SQL.strip('\n') and SQL.strip(' '):
return make_json_response( return make_json_response(
data=SQL.strip('\n'), data=SQL.strip('\n'),

View File

@@ -645,7 +645,7 @@ class CollationView(PGChildNodeView, SchemaDiffObjectCompare):
data[k] = v data[k] = v
try: try:
SQL, name = self.get_sql(gid, sid, data, scid, coid) SQL, _ = self.get_sql(gid, sid, data, scid, coid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -834,8 +834,8 @@ class CollationView(PGChildNodeView, SchemaDiffObjectCompare):
if data: if data:
if target_schema: if target_schema:
data['schema'] = target_schema data['schema'] = target_schema
sql, name = self.get_sql(gid=gid, sid=sid, data=data, scid=scid, sql, _ = self.get_sql(gid=gid, sid=sid, data=data, scid=scid,
coid=oid) coid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -585,7 +585,7 @@ AND relkind != 'c'))"""
""" """
data = self.request data = self.request
SQL, name = self.get_sql(gid, sid, data, scid) SQL, _ = self.get_sql(gid, sid, data, scid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -814,7 +814,7 @@ AND relkind != 'c'))"""
""" """
try: try:
SQL, name = self.get_sql(gid, sid, self.request, scid, doid) SQL, _ = self.get_sql(gid, sid, self.request, scid, doid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -980,9 +980,8 @@ AND relkind != 'c'))"""
 if data:
     if target_schema:
         data['schema'] = target_schema
-    sql, name = self.get_sql(gid=gid, sid=sid, scid=scid,
-                             data=data, doid=oid,
-                             is_schema_diff=True)
+    sql, _ = self.get_sql(gid=gid, sid=sid, scid=scid,
+                          data=data, doid=oid, is_schema_diff=True)
 else:
     if drop_sql:
         sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -433,7 +433,7 @@ class DomainConstraintView(PGChildNodeView):
""" """
data = self.request data = self.request
try: try:
status, SQL, name = self.get_sql(gid, sid, data, scid, doid) status, SQL, _ = self.get_sql(gid, sid, data, scid, doid)
if not status: if not status:
return SQL return SQL
@@ -652,7 +652,7 @@ class DomainConstraintView(PGChildNodeView):
""" """
data = self.request data = self.request
status, SQL, name = self.get_sql(gid, sid, data, scid, doid, coid) status, SQL, _ = self.get_sql(gid, sid, data, scid, doid, coid)
if status and SQL: if status and SQL:
return make_json_response( return make_json_response(
data=SQL, data=SQL,

View File

@@ -651,7 +651,7 @@ class FtsConfigurationView(PGChildNodeView, SchemaDiffObjectCompare):
data[k] = v data[k] = v
# Fetch sql query for modified data # Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, cfgid) SQL, _ = self.get_sql(gid, sid, did, scid, data, cfgid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -1059,8 +1059,8 @@ class FtsConfigurationView(PGChildNodeView, SchemaDiffObjectCompare):
target_schema = kwargs.get('target_schema', None) target_schema = kwargs.get('target_schema', None)
if data: if data:
sql, name = self.get_sql(gid=gid, sid=sid, did=did, scid=scid, sql, _ = self.get_sql(gid=gid, sid=sid, did=did, scid=scid,
data=data, cfgid=oid) data=data, cfgid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -642,7 +642,7 @@ class FtsDictionaryView(PGChildNodeView, SchemaDiffObjectCompare):
data[k] = v data[k] = v
# Fetch sql query for modified data # Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, dcid) SQL, _ = self.get_sql(gid, sid, did, scid, data, dcid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -963,8 +963,8 @@ class FtsDictionaryView(PGChildNodeView, SchemaDiffObjectCompare):
target_schema = kwargs.get('target_schema', None) target_schema = kwargs.get('target_schema', None)
if data: if data:
sql, name = self.get_sql(gid=gid, sid=sid, did=did, scid=scid, sql, _ = self.get_sql(gid=gid, sid=sid, did=did, scid=scid,
data=data, dcid=oid) data=data, dcid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -588,7 +588,7 @@ class FtsParserView(PGChildNodeView, SchemaDiffObjectCompare):
data[k] = v data[k] = v
# Fetch sql query for modified data # Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, pid) SQL, _ = self.get_sql(gid, sid, did, scid, data, pid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -1001,8 +1001,8 @@ class FtsParserView(PGChildNodeView, SchemaDiffObjectCompare):
target_schema = kwargs.get('target_schema', None) target_schema = kwargs.get('target_schema', None)
if data: if data:
sql, name = self.get_sql(gid=gid, sid=sid, did=did, scid=scid, sql, _ = self.get_sql(gid=gid, sid=sid, did=did, scid=scid,
data=data, pid=oid) data=data, pid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -426,7 +426,7 @@ class FtsTemplateView(PGChildNodeView, SchemaDiffObjectCompare):
) )
# Fetch sql query to update fts template # Fetch sql query to update fts template
sql, name = self.get_sql(gid, sid, did, scid, data, tid) sql, _ = self.get_sql(gid, sid, did, scid, data, tid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -540,7 +540,7 @@ class FtsTemplateView(PGChildNodeView, SchemaDiffObjectCompare):
data[k] = v data[k] = v
# Fetch sql query for modified data # Fetch sql query for modified data
SQL, name = self.get_sql(gid, sid, did, scid, data, tid) SQL, _ = self.get_sql(gid, sid, did, scid, data, tid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -867,8 +867,8 @@ class FtsTemplateView(PGChildNodeView, SchemaDiffObjectCompare):
target_schema = kwargs.get('target_schema', None) target_schema = kwargs.get('target_schema', None)
if data: if data:
sql, name = self.get_sql(gid=gid, sid=sid, did=did, scid=scid, sql, _ = self.get_sql(gid=gid, sid=sid, did=did, scid=scid,
data=data, tid=oid) data=data, tid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -778,7 +778,7 @@ class FunctionView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
status=200 status=200
) )
except Exception: except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info() _, exc_value, _ = sys.exc_info()
return internal_server_error(errormsg=str(exc_value)) return internal_server_error(errormsg=str(exc_value))
@check_precondition @check_precondition
@@ -1883,10 +1883,10 @@ class FunctionView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
if data: if data:
if target_schema: if target_schema:
data['schema'] = target_schema data['schema'] = target_schema
status, sql = self._get_sql(gid=gid, sid=sid, did=did, scid=scid, _, sql = self._get_sql(gid=gid, sid=sid, did=did, scid=scid,
data=data, fnid=oid, is_sql=False, data=data, fnid=oid, is_sql=False,
is_schema_diff=True, is_schema_diff=True,
allow_code_formatting=False) allow_code_formatting=False)
# Check if return type is changed then we need to drop the # Check if return type is changed then we need to drop the
# function first and then recreate it. # function first and then recreate it.
if 'prorettypename' in data: if 'prorettypename' in data:
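
sys.exc_info() returns a (type, value, traceback) triple; the handler above only needs the value, so the other two slots become '_'. A minimal runnable sketch:

    import sys

    try:
        1 / 0
    except Exception:
        # Only the exception value is reported; type and traceback are unused.
        _, exc_value, _ = sys.exc_info()
        print('error:', exc_value)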

View File

@@ -405,7 +405,7 @@ class PackageView(PGChildNodeView, SchemaDiffObjectCompare):
) )
data['schema'] = self.schema data['schema'] = self.schema
sql, name = self.getSQL(data=data, scid=scid, pkgid=None) sql, _ = self.getSQL(data=data, scid=scid, pkgid=None)
status, msg = self.conn.execute_scalar(sql) status, msg = self.conn.execute_scalar(sql)
if not status: if not status:
@@ -582,7 +582,7 @@ class PackageView(PGChildNodeView, SchemaDiffObjectCompare):
).format(arg) ).format(arg)
) )
sql, name = self.getSQL(data=data, scid=scid, pkgid=pkgid) sql, _ = self.getSQL(data=data, scid=scid, pkgid=pkgid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -752,10 +752,9 @@ class PackageView(PGChildNodeView, SchemaDiffObjectCompare):
 if target_schema:
     result['schema'] = target_schema
-sql, name = self.getSQL(data=result, scid=scid, pkgid=pkgid,
-                        sqltab=True,
-                        is_schema_diff=is_schema_diff,
-                        target_schema=target_schema)
+sql, _ = self.getSQL(data=result, scid=scid, pkgid=pkgid,
+                     sqltab=True, is_schema_diff=is_schema_diff,
+                     target_schema=target_schema)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
@@ -886,7 +885,7 @@ class PackageView(PGChildNodeView, SchemaDiffObjectCompare):
if data: if data:
if target_schema: if target_schema:
data['schema'] = target_schema data['schema'] = target_schema
sql, name = self.getSQL(data=data, scid=scid, pkgid=oid) sql, _ = self.getSQL(data=data, scid=scid, pkgid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -543,7 +543,7 @@ class SequenceView(PGChildNodeView, SchemaDiffObjectCompare):
data = request.form if request.form else json.loads( data = request.form if request.form else json.loads(
request.data request.data
) )
sql, name = self.get_SQL(gid, sid, did, data, scid, seid) sql, _ = self.get_SQL(gid, sid, did, data, scid, seid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -614,7 +614,7 @@ class SequenceView(PGChildNodeView, SchemaDiffObjectCompare):
"Could not find the required parameter ({})." "Could not find the required parameter ({})."
).format(arg) ).format(arg)
) )
sql, name = self.get_SQL(gid, sid, did, data, scid, seid) sql, _ = self.get_SQL(gid, sid, did, data, scid, seid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -753,8 +753,8 @@ class SequenceView(PGChildNodeView, SchemaDiffObjectCompare):
result['schema'] = target_schema result['schema'] = target_schema
result = self._formatter(result, scid, seid) result = self._formatter(result, scid, seid)
sql, name = self.get_SQL(gid, sid, did, result, scid, sql, _ = self.get_SQL(gid, sid, did, result, scid,
add_not_exists_clause=True) add_not_exists_clause=True)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -977,7 +977,7 @@ class SequenceView(PGChildNodeView, SchemaDiffObjectCompare):
if data: if data:
if target_schema: if target_schema:
data['schema'] = target_schema data['schema'] = target_schema
sql, name = self.get_SQL(gid, sid, did, data, scid, oid) sql, _ = self.get_SQL(gid, sid, did, data, scid, oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -609,7 +609,7 @@ class SynonymView(PGChildNodeView, SchemaDiffObjectCompare):
data[k] = v data[k] = v
try: try:
SQL, name = self.get_sql(gid, sid, data, scid, syid) SQL, _ = self.get_sql(gid, sid, data, scid, syid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -789,7 +789,7 @@ class SynonymView(PGChildNodeView, SchemaDiffObjectCompare):
if data: if data:
if target_schema: if target_schema:
data['schema'] = target_schema data['schema'] = target_schema
sql, name = self.get_sql(gid, sid, data, scid, oid) sql, _ = self.get_sql(gid, sid, data, scid, oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -1295,7 +1295,7 @@ class TableView(BaseTableView, DataTypeReader, SchemaDiffTableCompare):
if target_schema: if target_schema:
data['schema'] = target_schema data['schema'] = target_schema
sql, partition_sql = BaseTableView.get_reverse_engineered_sql( sql, _ = BaseTableView.get_reverse_engineered_sql(
self, did=did, scid=scid, tid=tid, main_sql=main_sql, self, did=did, scid=scid, tid=tid, main_sql=main_sql,
data=data, json_resp=json_resp, data=data, json_resp=json_resp,
add_not_exists_clause=if_exists_flag) add_not_exists_clause=if_exists_flag)
@@ -1336,7 +1336,7 @@ class TableView(BaseTableView, DataTypeReader, SchemaDiffTableCompare):
if not status: if not status:
return res return res
SQL, name = self.get_sql(did, scid, tid, data, res) SQL, _ = self.get_sql(did, scid, tid, data, res)
SQL = re.sub('\n{2,}', '\n\n', SQL) SQL = re.sub('\n{2,}', '\n\n', SQL)
SQL = SQL.strip('\n') SQL = SQL.strip('\n')

View File

@@ -600,7 +600,7 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
column_utils.type_formatter(data['cltype']) column_utils.type_formatter(data['cltype'])
try: try:
SQL, name = self.get_sql(scid, tid, clid, data) SQL, _ = self.get_sql(scid, tid, clid, data)
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -688,7 +688,7 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
column_utils.type_formatter(old_data['cltype']) column_utils.type_formatter(old_data['cltype'])
if 'cltype' in data and data['cltype'] != old_data['cltype']: if 'cltype' in data and data['cltype'] != old_data['cltype']:
length, precision, typeval = \ length, precision, _ = \
self.get_length_precision(data['cltype']) self.get_length_precision(data['cltype'])
# if new datatype does not have length or precision # if new datatype does not have length or precision
@@ -793,7 +793,7 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
data = column_utils.column_formatter(self.conn, tid, clid, data = column_utils.column_formatter(self.conn, tid, clid,
data, []) data, [])
SQL, name = self.get_sql(scid, tid, None, data, is_sql=True) SQL, _ = self.get_sql(scid, tid, None, data, is_sql=True)
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
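
get_length_precision() in the hunk above returns a three-element (length, precision, type value) style result, and this call site only needs the first two, so the third is discarded with '_'. A sketch under that assumption; the parser below is a toy, not pgAdmin's implementation:

    import re

    def get_length_precision(type_name):
        """Toy version: return (has_length, has_precision, base_type)."""
        match = re.match(r'(\w+)(\((\d+)(,\s*(\d+))?\))?', type_name)
        has_length = match.group(3) is not None
        has_precision = match.group(5) is not None
        return has_length, has_precision, match.group(1)

    # The base type name is not needed at this call site.
    length, precision, _ = get_length_precision('numeric(10, 2)')
    print(length, precision)  # -> True True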

View File

@@ -714,7 +714,7 @@ class CompoundTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
data['table'] = self.table data['table'] = self.table
try: try:
sql, name = compound_trigger_utils.get_sql( sql, _ = compound_trigger_utils.get_sql(
self.conn, data, tid, trid, self._DATABASE_LAST_SYSTEM_OID) self.conn, data, tid, trid, self._DATABASE_LAST_SYSTEM_OID)
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -884,7 +884,7 @@ class CompoundTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
target_schema = kwargs.get('target_schema', None) target_schema = kwargs.get('target_schema', None)
if data: if data:
sql, name = compound_trigger_utils.get_sql( sql, _ = compound_trigger_utils.get_sql(
self.conn, data, tid, oid, self._DATABASE_LAST_SYSTEM_OID) self.conn, data, tid, oid, self._DATABASE_LAST_SYSTEM_OID)
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -932,7 +932,7 @@ class CompoundTriggerView(PGChildNodeView, SchemaDiffObjectCompare):
if target_schema: if target_schema:
data['schema'] = target_schema data['schema'] = target_schema
sql, name = compound_trigger_utils.get_sql( sql, _ = compound_trigger_utils.get_sql(
self.conn, data, tid, None, self._DATABASE_LAST_SYSTEM_OID) self.conn, data, tid, None, self._DATABASE_LAST_SYSTEM_OID)
# If compound trigger is disbaled then add sql # If compound trigger is disbaled then add sql

View File

@@ -179,7 +179,7 @@ def get_reverse_engineered_sql(conn, **kwargs):
data = trigger_definition(data) data = trigger_definition(data)
SQL, name = get_sql(conn, data, tid, None, datlastsysoid) SQL, _ = get_sql(conn, data, tid, None, datlastsysoid)
sql_header = "-- Compound Trigger: {0}\n\n-- ".format(data['name']) sql_header = "-- Compound Trigger: {0}\n\n-- ".format(data['name'])

View File

@@ -279,7 +279,7 @@ class CheckConstraintView(PGChildNodeView):
SQL = render_template("/".join([self.template_path, SQL = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]), tid=tid) self._PROPERTIES_SQL]), tid=tid)
status, res = self.conn.execute_dict(SQL) _, res = self.conn.execute_dict(SQL)
for row in res['rows']: for row in res['rows']:
row['_type'] = self.node_type row['_type'] = self.node_type
@@ -303,7 +303,7 @@ class CheckConstraintView(PGChildNodeView):
self._NODES_SQL]), self._NODES_SQL]),
tid=tid, tid=tid,
cid=cid) cid=cid)
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
if len(rset['rows']) == 0: if len(rset['rows']) == 0:
return gone(_("""Could not find the check constraint.""")) return gone(_("""Could not find the check constraint."""))
@@ -344,7 +344,7 @@ class CheckConstraintView(PGChildNodeView):
SQL = render_template("/".join([self.template_path, SQL = render_template("/".join([self.template_path,
self._NODES_SQL]), self._NODES_SQL]),
tid=tid) tid=tid)
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']: for row in rset['rows']:
if "convalidated" in row and row["convalidated"]: if "convalidated" in row and row["convalidated"]:
@@ -399,7 +399,7 @@ class CheckConstraintView(PGChildNodeView):
SQL = render_template("/".join([self.template_path, SQL = render_template("/".join([self.template_path,
self._NODES_SQL]), self._NODES_SQL]),
tid=tid) tid=tid)
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']: for row in rset['rows']:
if "convalidated" in row and row["convalidated"]: if "convalidated" in row and row["convalidated"]:

View File

@@ -128,7 +128,7 @@ def get_check_constraint_sql(conn, tid, data, template_path=None):
c['schema'] = data['schema'] c['schema'] = data['schema']
c['table'] = data['name'] c['table'] = data['name']
modified_sql, name = get_sql(conn, c, tid, c['oid']) modified_sql, _ = get_sql(conn, c, tid, c['oid'])
sql.append(modified_sql.strip('\n')) sql.append(modified_sql.strip('\n'))
if 'added' in constraint: if 'added' in constraint:
@@ -136,7 +136,7 @@ def get_check_constraint_sql(conn, tid, data, template_path=None):
c['schema'] = data['schema'] c['schema'] = data['schema']
c['table'] = data['name'] c['table'] = data['name']
add_sql, name = get_sql(conn, c, tid) add_sql, _ = get_sql(conn, c, tid)
sql.append(add_sql.strip("\n")) sql.append(add_sql.strip("\n"))
if len(sql) > 0: if len(sql) > 0:

View File

@@ -345,7 +345,7 @@ class ExclusionConstraintView(PGChildNodeView):
self._PROPERTIES_SQL]), self._PROPERTIES_SQL]),
did=did, did=did,
tid=tid) tid=tid)
status, res = self.conn.execute_dict(SQL) _, res = self.conn.execute_dict(SQL)
for row in res['rows']: for row in res['rows']:
row['_type'] = self.node_type row['_type'] = self.node_type
@@ -374,7 +374,7 @@ class ExclusionConstraintView(PGChildNodeView):
self._NODES_SQL]), self._NODES_SQL]),
tid=tid, tid=tid,
exid=exid) exid=exid)
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
if len(rset['rows']) == 0: if len(rset['rows']) == 0:
return gone(_("""Could not find the exclusion constraint.""")) return gone(_("""Could not find the exclusion constraint."""))
@@ -411,7 +411,7 @@ class ExclusionConstraintView(PGChildNodeView):
SQL = render_template("/".join([self.template_path, SQL = render_template("/".join([self.template_path,
self._NODES_SQL]), self._NODES_SQL]),
tid=tid) tid=tid)
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']: for row in rset['rows']:
res.append( res.append(
@@ -457,7 +457,7 @@ class ExclusionConstraintView(PGChildNodeView):
SQL = render_template("/".join([self.template_path, SQL = render_template("/".join([self.template_path,
self._NODES_SQL]), self._NODES_SQL]),
tid=tid) tid=tid)
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']: for row in rset['rows']:
res.append( res.append(
@@ -756,7 +756,7 @@ class ExclusionConstraintView(PGChildNodeView):
data['schema'] = self.schema data['schema'] = self.schema
data['table'] = self.table data['table'] = self.table
try: try:
sql, name = \ sql, _ = \
exclusion_utils.get_sql(self.conn, data, did, tid, exid) exclusion_utils.get_sql(self.conn, data, did, tid, exid)
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql

View File

@@ -181,7 +181,7 @@ def get_exclusion_constraint_sql(conn, did, tid, data, template_path=None):
c['schema'] = data['schema'] c['schema'] = data['schema']
c['table'] = data['name'] c['table'] = data['name']
modified_sql, name = get_sql(conn, c, did, tid, c['oid']) modified_sql, _ = get_sql(conn, c, did, tid, c['oid'])
sql.append(modified_sql.strip('\n')) sql.append(modified_sql.strip('\n'))
if 'added' in constraint: if 'added' in constraint:
@@ -189,7 +189,7 @@ def get_exclusion_constraint_sql(conn, did, tid, data, template_path=None):
c['schema'] = data['schema'] c['schema'] = data['schema']
c['table'] = data['name'] c['table'] = data['name']
add_sql, name = get_sql(conn, c, did, tid) add_sql, _ = get_sql(conn, c, did, tid)
sql.append(add_sql.strip("\n")) sql.append(add_sql.strip("\n"))
if len(sql) > 0: if len(sql) > 0:

View File

@@ -355,7 +355,7 @@ class ForeignKeyConstraintView(PGChildNodeView):
SQL = render_template("/".join([self.template_path, SQL = render_template("/".join([self.template_path,
self._PROPERTIES_SQL]), self._PROPERTIES_SQL]),
tid=tid) tid=tid)
status, res = self.conn.execute_dict(SQL) _, res = self.conn.execute_dict(SQL)
for row in res['rows']: for row in res['rows']:
row['_type'] = self.node_type row['_type'] = self.node_type
@@ -382,7 +382,7 @@ class ForeignKeyConstraintView(PGChildNodeView):
SQL = render_template( SQL = render_template(
"/".join([self.template_path, self._NODES_SQL]), tid=tid "/".join([self.template_path, self._NODES_SQL]), tid=tid
) )
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
if len(rset['rows']) == 0: if len(rset['rows']) == 0:
return gone(gettext(FOREIGN_KEY_NOT_FOUND)) return gone(gettext(FOREIGN_KEY_NOT_FOUND))
@@ -427,7 +427,7 @@ class ForeignKeyConstraintView(PGChildNodeView):
SQL = render_template("/".join([self.template_path, SQL = render_template("/".join([self.template_path,
self._NODES_SQL]), self._NODES_SQL]),
tid=tid) tid=tid)
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
res = [] res = []
for row in rset['rows']: for row in rset['rows']:
if row["convalidated"]: if row["convalidated"]:
@@ -467,7 +467,7 @@ class ForeignKeyConstraintView(PGChildNodeView):
SQL = render_template("/".join([self.template_path, SQL = render_template("/".join([self.template_path,
self._NODES_SQL]), self._NODES_SQL]),
tid=tid) tid=tid)
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']: for row in rset['rows']:
if row["convalidated"]: if row["convalidated"]:
@@ -822,7 +822,7 @@ class ForeignKeyConstraintView(PGChildNodeView):
data['schema'] = self.schema data['schema'] = self.schema
data['table'] = self.table data['table'] = self.table
try: try:
sql, name = fkey_utils.get_sql(self.conn, data, tid, fkid) sql, _ = fkey_utils.get_sql(self.conn, data, tid, fkid)
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
sql = sql.strip('\n').strip(' ') sql = sql.strip('\n').strip(' ')

View File

@@ -224,7 +224,7 @@ def get_foreign_key_sql(conn, tid, data, template_path=None):
c['schema'] = data['schema'] c['schema'] = data['schema']
c['table'] = data['name'] c['table'] = data['name']
modified_sql, name = get_sql(conn, c, tid, c['oid']) modified_sql, _ = get_sql(conn, c, tid, c['oid'])
sql.append(modified_sql.strip("\n")) sql.append(modified_sql.strip("\n"))
if 'added' in constraint: if 'added' in constraint:
@@ -232,7 +232,7 @@ def get_foreign_key_sql(conn, tid, data, template_path=None):
c['schema'] = data['schema'] c['schema'] = data['schema']
c['table'] = data['name'] c['table'] = data['name']
add_sql, name = get_sql(conn, c, tid) add_sql, _ = get_sql(conn, c, tid)
sql.append(add_sql.strip("\n")) sql.append(add_sql.strip("\n"))
if len(sql) > 0: if len(sql) > 0:

View File

@@ -363,7 +363,7 @@ class IndexConstraintView(PGChildNodeView):
self._PROPERTIES_SQL]), did=did, self._PROPERTIES_SQL]), did=did,
tid=tid, tid=tid,
constraint_type=self.constraint_type) constraint_type=self.constraint_type)
status, res = self.conn.execute_dict(SQL) _, res = self.conn.execute_dict(SQL)
for row in res['rows']: for row in res['rows']:
row['_type'] = self.node_type row['_type'] = self.node_type
@@ -480,7 +480,7 @@ class IndexConstraintView(PGChildNodeView):
SQL = render_template("/".join([self.template_path, self._NODES_SQL]), SQL = render_template("/".join([self.template_path, self._NODES_SQL]),
tid=tid, tid=tid,
constraint_type=self.constraint_type) constraint_type=self.constraint_type)
status, rset = self.conn.execute_2darray(SQL) _, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']: for row in rset['rows']:
res.append( res.append(
@@ -815,8 +815,8 @@ class IndexConstraintView(PGChildNodeView):
data['schema'] = self.schema data['schema'] = self.schema
data['table'] = self.table data['table'] = self.table
try: try:
sql, name = idxcons_utils.get_sql(self.conn, data, did, tid, sql, _ = idxcons_utils.get_sql(self.conn, data, did, tid,
self.constraint_type, cid) self.constraint_type, cid)
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
sql = sql.strip('\n').strip(' ') sql = sql.strip('\n').strip(' ')

View File

@@ -156,8 +156,7 @@ def _get_sql_to_change_constraints(did, tid, ctype, data, constraint,
 c['schema'] = data['schema']
 c['table'] = data['name']
-modified_sql, name = get_sql(conn, c, did, tid, ctype,
-                             c['oid'])
+modified_sql, _ = get_sql(conn, c, did, tid, ctype, c['oid'])
 if modified_sql:
     sql.append(modified_sql.strip('\n'))
@@ -180,7 +179,7 @@ def _get_sql_to_add_constraints(did, tid, ctype, data, constraint,
c['schema'] = data['schema'] c['schema'] = data['schema']
c['table'] = data['name'] c['table'] = data['name']
add_sql, name = get_sql(conn, c, did, tid, ctype) add_sql, _ = get_sql(conn, c, did, tid, ctype)
sql.append(add_sql.strip("\n")) sql.append(add_sql.strip("\n"))

View File

@@ -843,7 +843,7 @@ class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
data["storage_parameters"].update({param: data[param]}) data["storage_parameters"].update({param: data[param]})
try: try:
sql, name = index_utils.get_sql( sql, _ = index_utils.get_sql(
self.conn, data=data, did=did, tid=tid, idx=idx, self.conn, data=data, did=did, tid=tid, idx=idx,
datlastsysoid=self._DATABASE_LAST_SYSTEM_OID, mode='create', datlastsysoid=self._DATABASE_LAST_SYSTEM_OID, mode='create',
show_sys_objects=self.blueprint.show_system_objects) show_sys_objects=self.blueprint.show_system_objects)
@@ -905,7 +905,7 @@ class IndexesView(PGChildNodeView, SchemaDiffObjectCompare):
data['nspname'] = self.schema data['nspname'] = self.schema
data['table'] = self.table data['table'] = self.table
sql, name = index_utils.get_sql( sql, _ = index_utils.get_sql(
self.conn, data=data, did=did, tid=tid, idx=idx, self.conn, data=data, did=did, tid=tid, idx=idx,
datlastsysoid=self._DATABASE_LAST_SYSTEM_OID, mode='create', datlastsysoid=self._DATABASE_LAST_SYSTEM_OID, mode='create',
show_sys_objects=self.blueprint.show_system_objects) show_sys_objects=self.blueprint.show_system_objects)

View File

@@ -341,9 +341,9 @@ def get_reverse_engineered_sql(conn, **kwargs):
if conn.manager.version >= 110000: if conn.manager.version >= 110000:
data = get_include_details(conn, idx, data) data = get_include_details(conn, idx, data)
SQL, name = get_sql(conn, data=data, did=did, tid=tid, idx=None, SQL, _ = get_sql(conn, data=data, did=did, tid=tid, idx=None,
datlastsysoid=datlastsysoid, datlastsysoid=datlastsysoid,
if_exists_flag=if_exists_flag) if_exists_flag=if_exists_flag)
if with_header: if with_header:
sql_header = '' sql_header = ''

View File

@@ -655,7 +655,7 @@ class PartitionsView(BaseTableView, DataTypeReader, SchemaDiffObjectCompare):
if not status: if not status:
return res return res
SQL, name = self.get_sql(did, scid, ptid, data, res) SQL, _ = self.get_sql(did, scid, ptid, data, res)
SQL = re.sub('\n{2,}', '\n\n', SQL) SQL = re.sub('\n{2,}', '\n\n', SQL)
SQL = SQL.strip('\n') SQL = SQL.strip('\n')
if SQL == '': if SQL == '':

View File

@@ -525,7 +525,7 @@ class RowSecurityView(PGChildNodeView):
""" """
data = dict(request.args) data = dict(request.args)
sql, name = row_security_policies_utils.get_sql( sql, _ = row_security_policies_utils.get_sql(
self.conn, data=data, scid=scid, plid=plid, policy_table_id=tid, self.conn, data=data, scid=scid, plid=plid, policy_table_id=tid,
schema=self.schema, table=self.table) schema=self.schema, table=self.table)
if not isinstance(sql, str): if not isinstance(sql, str):
@@ -619,7 +619,7 @@ class RowSecurityView(PGChildNodeView):
if data: if data:
data['schema'] = self.schema data['schema'] = self.schema
data['table'] = self.table data['table'] = self.table
sql, name = row_security_policies_utils.get_sql( sql, _ = row_security_policies_utils.get_sql(
self.conn, data=data, scid=scid, plid=oid, policy_table_id=tid, self.conn, data=data, scid=scid, plid=oid, policy_table_id=tid,
schema=self.schema, table=self.table) schema=self.schema, table=self.table)

View File

@@ -133,10 +133,9 @@ def get_reverse_engineered_sql(conn, **kwargs):
 data['schema'] = schema
 data['table'] = table
-SQL, name = get_sql(conn, data=data, scid=scid, plid=None,
-                    policy_table_id=policy_table_id,
-                    datlastsysoid=datlastsysoid, schema=schema,
-                    table=table)
+SQL, _ = get_sql(conn, data=data, scid=scid, plid=None,
+                 policy_table_id=policy_table_id,
+                 datlastsysoid=datlastsysoid, schema=schema, table=table)
 if with_header:
     sql_header = "-- POLICY: {0}\n\n-- ".format(data['name'])

View File

@@ -472,7 +472,7 @@ class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
This function returns modified SQL This function returns modified SQL
""" """
data = request.args data = request.args
sql, name = self.getSQL(gid, sid, data, tid, rid) sql, _ = self.getSQL(gid, sid, data, tid, rid)
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
sql = sql.strip('\n').strip(' ') sql = sql.strip('\n').strip(' ')

View File

@@ -805,7 +805,7 @@ class TriggerView(PGChildNodeView, SchemaDiffObjectCompare):
data['table'] = self.table data['table'] = self.table
try: try:
sql, name = trigger_utils.get_sql( sql, _ = trigger_utils.get_sql(
self.conn, data=data, tid=tid, trid=trid, self.conn, data=data, tid=tid, trid=trid,
datlastsysoid=self._DATABASE_LAST_SYSTEM_OID, datlastsysoid=self._DATABASE_LAST_SYSTEM_OID,
show_system_objects=self.blueprint.show_system_objects) show_system_objects=self.blueprint.show_system_objects)
@@ -861,7 +861,7 @@ class TriggerView(PGChildNodeView, SchemaDiffObjectCompare):
target_schema = kwargs.get('target_schema', None) target_schema = kwargs.get('target_schema', None)
if data: if data:
SQL, name = trigger_utils.get_sql( SQL, _ = trigger_utils.get_sql(
self.conn, data=data, tid=tid, trid=oid, self.conn, data=data, tid=tid, trid=oid,
datlastsysoid=self._DATABASE_LAST_SYSTEM_OID, datlastsysoid=self._DATABASE_LAST_SYSTEM_OID,
show_system_objects=self.blueprint.show_system_objects, show_system_objects=self.blueprint.show_system_objects,

View File

@@ -292,9 +292,9 @@ def get_reverse_engineered_sql(conn, **kwargs):
data = trigger_definition(data) data = trigger_definition(data)
SQL, name = get_sql(conn, data=data, tid=tid, trid=None, SQL, _ = get_sql(conn, data=data, tid=tid, trid=None,
datlastsysoid=datlastsysoid, datlastsysoid=datlastsysoid,
show_system_objects=show_system_objects) show_system_objects=show_system_objects)
if with_header: if with_header:
sql_header = "-- Trigger: {0}\n\n-- ".format(data['name']) sql_header = "-- Trigger: {0}\n\n-- ".format(data['name'])

View File

@@ -435,7 +435,7 @@ class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
typelist += collate typelist += collate
properties_list.append(typelist) properties_list.append(typelist)
is_tlength, is_precision, typeval = \ is_tlength, is_precision, _ = \
self.get_length_precision(row.get('elemoid', None)) self.get_length_precision(row.get('elemoid', None))
# Split length, precision from type name for grid # Split length, precision from type name for grid
@@ -468,7 +468,7 @@ class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
:param rows: list of data :param rows: list of data
:return: formatted response :return: formatted response
""" """
is_tlength, is_precision, typeval = \ is_tlength, is_precision, _ = \
self.get_length_precision(data.get('elemoid', None)) self.get_length_precision(data.get('elemoid', None))
# Split length, precision from type name for grid # Split length, precision from type name for grid
@@ -1227,7 +1227,7 @@ class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
data[key] = val data[key] = val
try: try:
sql, name = self.get_sql(gid, sid, data, scid, tid) sql, _ = self.get_sql(gid, sid, data, scid, tid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -1475,7 +1475,7 @@ class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
if data[k] == '-': if data[k] == '-':
data[k] = None data[k] = None
SQL, name = self.get_sql(gid, sid, data, scid, tid=None, is_sql=True) SQL, _ = self.get_sql(gid, sid, data, scid, tid=None, is_sql=True)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(SQL, str): if not isinstance(SQL, str):
return SQL return SQL
@@ -1582,8 +1582,8 @@ class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
if data: if data:
if target_schema: if target_schema:
data['schema'] = target_schema data['schema'] = target_schema
sql, name = self.get_sql(gid=gid, sid=sid, scid=scid, sql, _ = self.get_sql(gid=gid, sid=sid, scid=scid, data=data,
data=data, tid=oid) tid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -1735,7 +1735,7 @@ class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
if data: if data:
if target_schema: if target_schema:
data['schema'] = target_schema data['schema'] = target_schema
sql, name_or_error = self.getSQL(gid, sid, did, data, oid) sql, _ = self.getSQL(gid, sid, did, data, oid)
if sql.find('DROP VIEW') != -1: if sql.find('DROP VIEW') != -1:
sql = gettext(""" sql = gettext("""
-- Changing the columns in a view requires dropping and re-creating the view. -- Changing the columns in a view requires dropping and re-creating the view.

View File

@@ -407,7 +407,7 @@ class SubscriptionView(PGChildNodeView, SchemaDiffObjectCompare):
sql = render_template("/".join([self.template_path, sql = render_template("/".join([self.template_path,
'stats.sql']), 'stats.sql']),
subid=subid, did=did, conn=self.conn) subid=subid, did=did, conn=self.conn)
status, res = self.conn.execute_dict(sql) _, res = self.conn.execute_dict(sql)
return make_json_response( return make_json_response(
data=res, data=res,
status=200 status=200
@@ -585,7 +585,7 @@ class SubscriptionView(PGChildNodeView, SchemaDiffObjectCompare):
except ValueError: except ValueError:
data[k] = v data[k] = v
try: try:
sql, name = self.get_sql(data, subid, 'msql') sql, _ = self.get_sql(data, subid, 'msql')
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql
@@ -946,7 +946,7 @@ class SubscriptionView(PGChildNodeView, SchemaDiffObjectCompare):
if 'pub' in data and isinstance(data['pub'], str): if 'pub' in data and isinstance(data['pub'], str):
# Convert publication details to list # Convert publication details to list
data['pub'] = data['pub'].split(',,') data['pub'] = data['pub'].split(',,')
sql, name = self.get_sql(data=data, subid=oid) sql, _ = self.get_sql(data=data, subid=oid)
else: else:
if drop_sql: if drop_sql:
sql = self.delete(gid=gid, sid=sid, did=did, sql = self.delete(gid=gid, sid=sid, did=did,

View File

@@ -174,7 +174,7 @@ class JobView(PGChildNodeView):
self.template_path = 'pga_job/sql/pre3.4' self.template_path = 'pga_job/sql/pre3.4'
if 'pgAgent'not in self.manager.db_info: if 'pgAgent'not in self.manager.db_info:
status, res = self.conn.execute_dict(""" _, res = self.conn.execute_dict("""
SELECT EXISTS( SELECT EXISTS(
SELECT 1 FROM information_schema.columns SELECT 1 FROM information_schema.columns
WHERE WHERE

View File

@@ -201,7 +201,7 @@ class JobStepView(PGChildNodeView):
self.template_path = 'pga_jobstep/sql/pre3.4' self.template_path = 'pga_jobstep/sql/pre3.4'
if 'pgAgent' not in self.manager.db_info: if 'pgAgent' not in self.manager.db_info:
status, res = self.conn.execute_dict(""" _, res = self.conn.execute_dict("""
SELECT EXISTS( SELECT EXISTS(
SELECT 1 FROM information_schema.columns SELECT 1 FROM information_schema.columns
WHERE WHERE

View File

@@ -606,7 +606,7 @@ class ResourceGroupView(NodeView):
except ValueError: except ValueError:
data[k] = v data[k] = v
sql, name = self.get_sql(data, rg_id) sql, _ = self.get_sql(data, rg_id)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql

View File

@@ -485,7 +485,7 @@ class TablespaceView(PGChildNodeView):
current_app.logger.exception(ve) current_app.logger.exception(ve)
data[k] = v data[k] = v
sql, name = self.get_sql(gid, sid, data, tsid) sql, _ = self.get_sql(gid, sid, data, tsid)
# Most probably this is due to error # Most probably this is due to error
if not isinstance(sql, str): if not isinstance(sql, str):
return sql return sql

View File

@@ -459,7 +459,7 @@ def dashboard_stats(sid=None, did=None):
"/".join([g.template_path, 'dashboard_stats.sql']), did=did, "/".join([g.template_path, 'dashboard_stats.sql']), did=did,
chart_names=chart_names, chart_names=chart_names,
) )
status, res = g.conn.execute_dict(sql) _, res = g.conn.execute_dict(sql)
for chart_row in res['rows']: for chart_row in res['rows']:
resp_data[chart_row['chart_name']] = json.loads( resp_data[chart_row['chart_name']] = json.loads(

View File

@@ -188,7 +188,7 @@ def stop_heartbeat():
if data != '': if data != '':
data = json.loads(data) data = json.loads(data)
status, msg = stop_server_heartbeat(data) _, msg = stop_server_heartbeat(data)
return make_json_response(data=msg, return make_json_response(data=msg,
status=200) status=200)

View File

@@ -581,7 +581,7 @@ class BatchProcess:
execution_time = None execution_time = None
if j is not None: if j is not None:
status, updated = BatchProcess.update_process_info(j) _, updated = BatchProcess.update_process_info(j)
if updated: if updated:
db.session.commit() db.session.commit()
self.stime = j.start_time self.stime = j.start_time

View File

@@ -326,7 +326,7 @@ class Azure:
if type in self._clients: if type in self._clients:
return self._clients[type] return self._clients[type]
status, _credentials = self._get_azure_credentials() _, _credentials = self._get_azure_credentials()
if type == 'postgresql': if type == 'postgresql':
client = PostgreSQLManagementClient(_credentials, client = PostgreSQLManagementClient(_credentials,

View File

@@ -103,7 +103,7 @@ def biganimal_providers(project_id):
def biganimal_regions(): def biganimal_regions():
"""Get Regions.""" """Get Regions."""
biganimal_obj = pickle.loads(session['biganimal']['provider_obj']) biganimal_obj = pickle.loads(session['biganimal']['provider_obj'])
status, regions = biganimal_obj.get_regions() _, regions = biganimal_obj.get_regions()
session['biganimal']['provider_obj'] = pickle.dumps(biganimal_obj, -1) session['biganimal']['provider_obj'] = pickle.dumps(biganimal_obj, -1)
return make_json_response(data=regions) return make_json_response(data=regions)

View File

@@ -282,7 +282,7 @@ def save_pref(data):
and data['value'].isspace(): and data['value'].isspace():
data['value'] = '' data['value'] = ''
res, msg = Preferences.save_cli( res, _ = Preferences.save_cli(
data['mid'], data['category_id'], data['id'], data['user_id'], data['mid'], data['category_id'], data['id'], data['user_id'],
data['value']) data['value'])

View File

@@ -580,9 +580,9 @@ def objects(sid, did, scid=None):
server_info['template_path'] = 'grant_wizard/ppas/#{0}#'.format( server_info['template_path'] = 'grant_wizard/ppas/#{0}#'.format(
server_info['version']) server_info['version'])
res, msg, empty_schema_list = get_data(sid, did, scid, res, _, empty_schema_list = get_data(sid, did, scid,
'schema' if scid else 'database', 'schema' if scid else 'database',
server_info, True) server_info, True)
tree_data = { tree_data = {
'table': [], 'table': [],

View File

@@ -2033,8 +2033,7 @@ def check_result(result, conn, statusmsg):
     )
 else:
     status = 'Success'
-    additional_msgs, statusmsg = get_additional_msgs(conn,
-                                                     statusmsg)
+    _, statusmsg = get_additional_msgs(conn, statusmsg)
 columns, result = convert_data_to_dict(conn, result)
@@ -2097,7 +2096,7 @@ def poll_end_execution_result(trans_id):
(de_inst.function_data['language'] == 'edbspl' or (de_inst.function_data['language'] == 'edbspl' or
de_inst.function_data['language'] == 'plpgsql'): de_inst.function_data['language'] == 'plpgsql'):
status = 'Success' status = 'Success'
additional_msgs, statusmsg = get_additional_msgs(conn, statusmsg) _, statusmsg = get_additional_msgs(conn, statusmsg)
return make_json_response( return make_json_response(
success=1, success=1,
@@ -2111,8 +2110,7 @@ def poll_end_execution_result(trans_id):
     return check_result(result, conn, statusmsg)
 else:
     status = 'Busy'
-    additional_msgs, statusmsg = get_additional_msgs(conn,
-                                                     statusmsg)
+    _, statusmsg = get_additional_msgs(conn, statusmsg)
     return make_json_response(
         data={
             'status': status,
@@ -2164,7 +2162,7 @@ def poll_result(trans_id):
status = 'ERROR' status = 'ERROR'
elif status == ASYNC_OK and result is not None: elif status == ASYNC_OK and result is not None:
status = 'Success' status = 'Success'
columns, result = convert_data_to_dict(conn, result) _, result = convert_data_to_dict(conn, result)
else: else:
status = 'Busy' status = 'Busy'
else: else:

View File

@@ -640,7 +640,7 @@ def sql(trans_id, sgid, sid, did):
sql += table_sql sql += table_sql
for tab_fk in tab_foreign_keys: for tab_fk in tab_foreign_keys:
fk_sql, name = fkey_utils.get_sql(conn, tab_fk, None) fk_sql, _ = fkey_utils.get_sql(conn, tab_fk, None)
sql += '\n\n' + fk_sql sql += '\n\n' + fk_sql
return make_json_response( return make_json_response(

View File

@@ -106,7 +106,7 @@ class ERDHelper:
conn_id=self.conn_id, did=self.did, sid=self.sid) conn_id=self.conn_id, did=self.did, sid=self.sid)
def get_table_sql(self, data, with_drop=False): def get_table_sql(self, data, with_drop=False):
SQL, name = self.table_view.sql( SQL, _ = self.table_view.sql(
conn_id=self.conn_id, did=self.did, sid=self.sid, conn_id=self.conn_id, did=self.did, sid=self.sid,
data=data, with_drop=with_drop) data=data, with_drop=with_drop)
return SQL return SQL

View File

@@ -296,7 +296,7 @@ def start_process(data):
data['db'] = db data['db'] = db
conn, manager = _get_connection(int(data['sid']), data) _, manager = _get_connection(int(data['sid']), data)
psql_utility = manager.utility('sql') psql_utility = manager.utility('sql')
connection_data = get_connection_str(psql_utility, db, connection_data = get_connection_str(psql_utility, db,
manager) manager)

View File

@@ -365,8 +365,7 @@ def create_restore_job(sid):
 if is_error:
     return errmsg
-is_error, errmsg, driver, manager, conn, \
-    connected, server = _connect_server(sid)
+is_error, errmsg, driver, manager, conn, _, server = _connect_server(sid)
 if is_error:
     return errmsg

View File

@@ -677,7 +677,7 @@ def ddl_compare(trans_id, source_sid, source_did, source_scid,
DDL comparison. DDL comparison.
""" """
# Check the transaction and connection status # Check the transaction and connection status
status, error_msg, diff_model_obj, session_obj = \ _, error_msg, _, _ = \
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == ERROR_MSG_TRANS_ID_NOT_FOUND: if error_msg == ERROR_MSG_TRANS_ID_NOT_FOUND:

View File

@@ -99,7 +99,7 @@ class SearchObjectsHelper:
""" """
if obj_type == 'all': if obj_type == 'all':
status, result = conn.execute_dict( _, result = conn.execute_dict(
"SELECT COUNT(1) FROM information_schema.table_privileges " "SELECT COUNT(1) FROM information_schema.table_privileges "
"WHERE table_name = 'pg_subscription' " "WHERE table_name = 'pg_subscription' "
"AND privilege_type = 'SELECT'") "AND privilege_type = 'SELECT'")

View File

@@ -452,7 +452,7 @@ def _init_sqleditor(trans_id, connect, sgid, sid, did, dbname=None, **kwargs):
else {"did": did})) else {"did": did}))
if connect: if connect:
status, msg, is_ask_password, user, role, password = _connect( status, msg, is_ask_password, user, _, _ = _connect(
conn, **kwargs) conn, **kwargs)
if not status: if not status:
current_app.logger.error(msg) current_app.logger.error(msg)
@@ -483,7 +483,7 @@ def _init_sqleditor(trans_id, connect, sgid, sid, did, dbname=None, **kwargs):
**({"database": dbname} **({"database": dbname}
if dbname is not None if dbname is not None
else {"did": did})) else {"did": did}))
status, msg, is_ask_password, user, role, password = _connect( status, msg, is_ask_password, user, _, _ = _connect(
conn_ac, **kwargs) conn_ac, **kwargs)
except (ConnectionLost, SSHTunnelConnectionLost) as e: except (ConnectionLost, SSHTunnelConnectionLost) as e:
@@ -564,7 +564,7 @@ def update_sqleditor_connection(trans_id, sgid, sid, did):
return errmsg return errmsg
else: else:
try: try:
_, _, _, new_trans_obj, new_session_obj = \ _, _, _, _, new_session_obj = \
check_transaction_status(new_trans_id) check_transaction_status(new_trans_id)
new_session_obj['primary_keys'] = session_obj[ new_session_obj['primary_keys'] = session_obj[
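
When only the last of several returned values is needed, as in the five-element unpack above, Python's extended unpacking is a shorter equivalent that still avoids named unused locals. A small sketch; check_status() is a made-up stand-in for check_transaction_status():

    def check_status(trans_id):
        """Stand-in returning five values, like check_transaction_status()."""
        del trans_id
        return True, None, 'conn', 'trans_obj', {'primary_keys': ['id']}

    # Two equivalent ways to keep only the last element:
    _, _, _, _, session_obj = check_status(1)
    *_, session_obj = check_status(1)  # extended unpacking

    print(session_obj['primary_keys'])
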
@@ -814,7 +814,7 @@ def start_view_data(trans_id):
         # Fetch the sql and primary_keys from the object
         sql = trans_obj.get_sql(default_conn)
-        pk_names, primary_keys = trans_obj.get_primary_keys(default_conn)
+        _, primary_keys = trans_obj.get_primary_keys(default_conn)
         session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
@@ -1016,7 +1016,7 @@ def poll(trans_id):
             # resultsets and primary keys
             if isinstance(trans_obj, QueryToolCommand) and \
                     trans_obj.check_updatable_results_pkeys_oids():
-                pk_names, primary_keys = trans_obj.get_primary_keys()
+                _, primary_keys = trans_obj.get_primary_keys()
                 session_obj['has_oids'] = trans_obj.has_oids()
                 # Update command_obj in session obj
                 session_obj['command_obj'] = pickle.dumps(
@@ -1744,7 +1744,7 @@ def check_and_upgrade_to_qt(trans_id, connect):
             'password': default_conn.manager.password,
             'conn_id': data.conn_id
         }
-        is_error, errmsg, conn_id, version = _init_sqleditor(
+        is_error, errmsg, _, _ = _init_sqleditor(
             trans_id, connect, data.sgid, data.sid, data.did, **kwargs)
     return is_error, errmsg
@@ -2598,8 +2598,7 @@ def add_query_history(trans_id):
         did: database id
     """
-    status, error_msg, conn, trans_obj, session_ob = \
-        check_transaction_status(trans_id)
+    _, _, conn, trans_obj, _ = check_transaction_status(trans_id)
     if not trans_obj:
         return make_json_response(
@@ -2625,9 +2624,7 @@ def clear_query_history(trans_id):
         did: database id
     """
-    status, error_msg, conn, trans_obj, session_ob = \
-        check_transaction_status(trans_id)
+    _, _, conn, trans_obj, _ = check_transaction_status(trans_id)
     filter_json = request.get_json(silent=True)
     return QueryHistory.clear(current_user.id, trans_obj.sid, conn.db,
                               filter_json)
@@ -2647,8 +2644,7 @@ def get_query_history(trans_id):
         did: database id
     """
-    status, error_msg, conn, trans_obj, session_ob = \
-        check_transaction_status(trans_id)
+    _, _, conn, trans_obj, _ = check_transaction_status(trans_id)
     return QueryHistory.get(current_user.id, trans_obj.sid, conn.db)
@@ -2671,8 +2667,7 @@ def macros(trans_id, macro_id=None, json_resp=True):
         macro_id: Macro id
     """
-    status, error_msg, conn, trans_obj, session_ob = \
-        check_transaction_status(trans_id)
+    _, _, _, _, _ = check_transaction_status(trans_id)
     return get_macros(macro_id, json_resp)
@@ -2690,7 +2685,6 @@ def update_macros(trans_id):
         trans_id: unique transaction id
     """
-    status, error_msg, conn, trans_obj, session_ob = \
-        check_transaction_status(trans_id)
+    _, _, _, _, _ = check_transaction_status(trans_id)
     return set_macros()
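
Several of the sqleditor hunks above discard everything returned by check_transaction_status(). Unpacking into underscores still asserts that the callee returns exactly five items (a ValueError is raised otherwise), whereas a bare call would not; a sketch using a stand-in for the real helper:

    def check_transaction_status(trans_id):
        # Stand-in returning the five-element shape used in the hunks above.
        return True, None, "conn", "trans_obj", {"id": trans_id}

    # Keep only what the caller needs; underscores mark the rest as unused.
    _, _, conn, trans_obj, _ = check_transaction_status(42)

    # Discard everything: the unpack still fails loudly if the arity changes.
    _, _, _, _, _ = check_transaction_status(42)

    # A bare call would also satisfy SonarQube, but drops that arity check.
    check_transaction_status(42)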

View File

@@ -482,7 +482,7 @@ class TableCommand(GridCommand):
         """
         # Fetch the primary keys for the table
-        pk_names, primary_keys = self.get_primary_keys(default_conn)
+        _, primary_keys = self.get_primary_keys(default_conn)
         # Fetch OIDs status
         has_oids = self.has_oids(default_conn)

View File

@@ -153,7 +153,7 @@ class StartRunningQuery:
         # and formatted_error is True.
         with app.app_context():
             try:
-                status, result = conn.execute_async(sql)
+                _, _ = conn.execute_async(sql)
                 # # If the transaction aborted for some reason and
                 # # Auto RollBack is True then issue a rollback to cleanup.
                 if is_rollback_req:

View File

@@ -1857,7 +1857,7 @@ Failed to reset the connection to the server due to following error:
         :param parameters: query parameters / variables
         :return:
         """
-        status, cursor = self.__cursor()
+        status, _ = self.__cursor()
         if not status:
             return None
         else:

View File

@@ -60,7 +60,7 @@ def configure_driver_encodings(encodings):
     # python encoding of pyscopg's internal encodings dict.
     for key, val in encode_dict.items():
-        postgres_encoding, python_encoding = val
+        _, python_encoding = val
         psycopg._encodings._py_codecs[key] = python_encoding
     encodings.update((k.encode(), v
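
The encodings hunk above unpacks a two-element dict value and keeps only the Python-side codec name. A tiny runnable sketch with invented sample data:

    encode_dict = {
        # key: (postgres_encoding, python_encoding) -- sample values only
        "SQL_ASCII": ("SQL_ASCII", "raw_unicode_escape"),
        "LATIN1": ("LATIN1", "iso8859-1"),
    }

    py_codecs = {}
    for key, val in encode_dict.items():
        # Only the Python codec name is needed; the first slot is unused.
        _, python_encoding = val
        py_codecs[key] = python_encoding

    print(py_codecs)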

View File

@@ -860,11 +860,8 @@ class SQLAutoComplete():
             "signature": self.signature_arg_style,
         }[usage]
         args = func.args()
-        if not template:
-            return "()"
-        elif usage == "call" and len(args) < 2:
-            return "()"
-        elif usage == "call" and func.has_variadic():
-            return "()"
+        if (not template or (usage == "call" and len(args) < 2) or
+                (usage == "call" and func.has_variadic())):
+            return "()"
         multiline = usage == "call" and len(args) > self.call_arg_oneliner_max
         max_arg_len = max(len(a.name) for a in args) if multiline else 0
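
The @@ -860 hunk folds three branches that all return "()" into one condition; because `or` short-circuits, the merged test checks the same clauses in the same order as the original if/elif chain. A reduced, runnable sketch (the function and parameters are illustrative, not the pgAdmin signature):

    def empty_call_syntax(template, usage, args, has_variadic):
        # Merged form: one early return instead of three identical branches.
        if (not template or (usage == "call" and len(args) < 2) or
                (usage == "call" and has_variadic)):
            return "()"
        return "(" + ", ".join(args) + ")"

    print(empty_call_syntax("sig", "call", ["a", "b", "c"], False))  # (a, b, c)
    print(empty_call_syntax("sig", "call", ["a"], False))            # ()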
@@ -1248,7 +1245,7 @@ class SQLAutoComplete():
         :return:
         """
         data = []
-        query, in_clause = self._get_function_sql(schema)
+        query, _ = self._get_function_sql(schema)
         if self.conn.connected():
             status, res = self.conn.execute_dict(query)

View File

@@ -19,7 +19,7 @@ def isolate_query_ctes(full_text, text_before_cursor):
     if not full_text or not full_text.strip():
         return full_text, text_before_cursor, tuple()
-    ctes, remainder = extract_ctes(full_text)
+    ctes, _ = extract_ctes(full_text)
     if not ctes:
         return full_text, text_before_cursor, ()

View File

@@ -15,7 +15,7 @@ import config
 def validate_email(email):
     try:
         # Validate.
-        valid = email_validate(
+        _ = email_validate(
             email, check_deliverability=config.CHECK_EMAIL_DELIVERABILITY)
         # Update with the normalized form.
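
In validate_email() the return value of email_validate() is never read; the call is kept because it raises on invalid input, so the result is now bound to `_`. A sketch of the same idea with a stand-in validator (pgAdmin imports its real validator elsewhere; this stand-in only mimics the raise-on-invalid behaviour):

    def email_validate(email, check_deliverability=False):
        # Illustrative stand-in: raise on clearly bad input, else return a result.
        if "@" not in email:
            raise ValueError("invalid email address")
        return {"normalized": email.lower()}

    def validate_email(email):
        try:
            # Only the exception path matters; '_' documents the discarded result.
            _ = email_validate(email, check_deliverability=False)
            return True
        except ValueError:
            return False

    print(validate_email("User@Example.com"))  # True
    print(validate_email("not-an-email"))      # False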

View File

@@ -65,7 +65,7 @@ def get_version_mapping(template):
     template_path_parts = template.split("#", 3)
     if len(template_path_parts) == 4:
-        _, server_type, _, _ = template_path_parts
+        _, _, _, _ = template_path_parts
         return get_version_mapping_directories()

View File

@@ -1103,7 +1103,7 @@ def get_scenario_name(cases):
     for class_name, test_case_list in cases.items():
         result = {class_name: []}
         for case_name_dict in test_case_list:
-            key, value = list(case_name_dict.items())[0]
+            key, _ = list(case_name_dict.items())[0]
             class_names_dict = dict(
                 (c_name, "") for scenario in result[class_name] for
                 c_name in scenario.keys())

View File

@@ -255,7 +255,6 @@ class ManageUsers:
     ):
         """Get user(s) details."""
         app = create_app(config.APP_NAME + '-cli')
-        usr = None
         with app.test_request_context():
             if username and auth_source:
                 users = User.query.filter_by(username=username,

File diff suppressed because it is too large