1) Fixed cognitive complexity issues reported by SonarQube.

2) Fixed code smell 'Add a nested comment explaining why this method is
   empty, or complete the implementation'.
Aditya Toshniwal 2020-07-24 11:46:30 +05:30 committed by Akshay Joshi
parent 310450cfa7
commit 505a3ac960
31 changed files with 221 additions and 178 deletions
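
The second fix follows the SonarQube rule that an intentionally empty method body must carry a nested comment explaining why it is empty. A minimal, runnable sketch of the pattern (an illustrative class, not code from this commit):

class ServerSchemaMigration:
    """Hypothetical one-way migration used only to illustrate the rule."""

    def upgrade(self):
        print("ALTER TABLE server ADD COLUMN example TEXT")

    def downgrade(self):
        # pgAdmin only upgrades, downgrade not implemented.
        pass


if __name__ == '__main__':
    ServerSchemaMigration().upgrade()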

View File

@ -30,4 +30,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
${downgrades if downgrades else "pass"}

View File

@ -31,5 +31,7 @@ def upgrade():
'ALTER TABLE server ADD COLUMN fgcolor TEXT(10)'
)
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -229,9 +229,5 @@ def upgrade():
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
print(u"""
Cannot downgrade from this version
Exiting...""")
sys.exit(1)
# ### end Alembic commands ###
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -48,4 +48,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -23,4 +23,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -25,4 +25,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -78,5 +78,7 @@ def upgrade():
'ALTER TABLE server ADD COLUMN service TEXT'
)
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -86,5 +86,7 @@ def upgrade():
# Remove old data
db.engine.execute("DROP TABLE server_old")
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -48,4 +48,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -29,4 +29,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -37,5 +37,7 @@ def upgrade():
'ALTER TABLE server ADD COLUMN tunnel_identity_file TEXT'
)
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -88,4 +88,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -29,4 +29,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -24,4 +24,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -30,4 +30,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -31,4 +31,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -43,5 +43,7 @@ def upgrade():
"""
)
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -23,4 +23,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -39,4 +39,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -31,4 +31,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -43,4 +43,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -45,4 +45,5 @@ def upgrade():
def downgrade():
# pgAdmin only upgrades, downgrade not implemented.
pass

View File

@ -462,6 +462,41 @@ class ExclusionConstraintView(PGChildNodeView):
))
return res
@staticmethod
def parse_input_data(data):
"""
This function is used to parse the input data.
:param data:
:return:
"""
for k, v in data.items():
try:
# comments should be taken as is because if user enters a
# json comment it is parsed by loads which should not happen
if k in ('comment',):
data[k] = v
else:
data[k] = json.loads(v, encoding='utf-8')
except (ValueError, TypeError, KeyError):
data[k] = v
return data
@staticmethod
def check_required_args(data, required_args):
"""
This function is used to check the required arguments.
:param data:
:param required_args:
:return:
"""
for arg in required_args:
if arg not in data or \
(isinstance(data[arg], list) and len(data[arg]) < 1):
return arg
return None
@check_precondition
def create(self, gid, sid, did, scid, tid, exid=None):
"""
@ -480,36 +515,22 @@ class ExclusionConstraintView(PGChildNodeView):
"""
required_args = ['columns']
data = request.form if request.form else json.loads(
request.data, encoding='utf-8'
)
for k, v in data.items():
try:
# comments should be taken as is because if user enters a
# json comment it is parsed by loads which should not happen
if k in ('comment',):
data[k] = v
else:
data[k] = json.loads(v, encoding='utf-8')
except (ValueError, TypeError, KeyError):
data[k] = v
for arg in required_args:
if arg not in data or \
(isinstance(data[arg], list) and len(data[arg]) < 1):
return make_json_response(
status=400,
success=0,
errormsg=_(
"Could not find required parameter ({})."
).format(arg)
)
data = json.loads(request.data, encoding='utf-8')
data = self.parse_input_data(data)
arg_missing = self.check_required_args(data, required_args)
if arg_missing is not None:
return make_json_response(
status=400,
success=0,
errormsg=_(
"Could not find required parameter ({})."
).format(arg_missing)
)
data['schema'] = self.schema
data['table'] = self.table
try:
if 'name' not in data or data['name'] == "":
if data.get('name', '') == "":
SQL = render_template(
"/".join([self.template_path, 'begin.sql']))
# Start transaction.
@ -528,7 +549,7 @@ class ExclusionConstraintView(PGChildNodeView):
self.end_transaction()
return internal_server_error(errormsg=res)
if 'name' not in data or data['name'] == "":
if data.get('name', '') == "":
sql = render_template(
"/".join([self.template_path,
'get_oid_with_transaction.sql']),
@ -784,13 +805,8 @@ class ExclusionConstraintView(PGChildNodeView):
columns = []
for row in res['rows']:
if row['options'] & 1:
order = False
nulls_order = True if (row['options'] & 2) else False
else:
order = True
nulls_order = True if (row['options'] & 2) else False
nulls_order = True if (row['options'] & 2) else False
order = False if row['options'] & 1 else True
columns.append({"column": row['coldef'].strip('"'),
"oper_class": row['opcname'],
"order": order,
@ -814,7 +830,7 @@ class ExclusionConstraintView(PGChildNodeView):
data['include'] = [col['colname'] for col in res['rows']]
if not data['amname'] or data['amname'] == '':
if data.get('amname', '') == "":
data['amname'] = 'btree'
SQL = render_template(
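
The hunks above lower the cognitive complexity of create() by moving the value-parsing loop and the required-argument check into the static helpers parse_input_data() and check_required_args(). A standalone sketch of the same pattern, with illustrative return values and none of the Flask/pgAdmin plumbing:

import json


def parse_input_data(data):
    """Decode JSON-encoded form values; comments are taken as-is."""
    parsed = {}
    for key, value in data.items():
        if key == 'comment':
            parsed[key] = value
            continue
        try:
            parsed[key] = json.loads(value)
        except (ValueError, TypeError):
            parsed[key] = value
    return parsed


def check_required_args(data, required_args):
    """Return the first missing or empty required argument, or None."""
    for arg in required_args:
        if arg not in data or (isinstance(data[arg], list) and not data[arg]):
            return arg
    return None


def create(raw_form):
    data = parse_input_data(raw_form)
    missing = check_required_args(data, ['columns'])
    if missing is not None:
        return {'status': 400, 'error': 'missing parameter: ' + missing}
    return {'status': 200, 'data': data}


print(create({'name': '"excl_con"', 'columns': '[{"column": "id"}]'}))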

View File

@ -501,9 +501,6 @@ class VacuumSettings:
"""
vacuum_settings = dict()
def __init__(self):
pass
def fetch_default_vacuum_settings(self, conn, sid, setting_type):
"""
This function is used to fetch and cached the default vacuum settings
@ -583,16 +580,11 @@ class VacuumSettings:
row_name = row['name']
if type == 'toast':
row_name = 'toast_{0}'.format(row['name'])
if row_name in result and result[row_name] is not None:
if row['column_type'] == 'number':
value = float(result[row_name])
value = int(value) if value % 1 == 0 else value
else:
value = int(result[row_name])
row['value'] = value
if result.get(row_name, None) is not None:
value = float(result[row_name])
row['value'] = int(value) if value % 1 == 0 else value
else:
if 'value' in row:
row.pop('value')
row.pop('value', None)
return vacuum_settings_tmp
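
The replacement above leans on dict.get() and dict.pop(key, default) to collapse the explicit membership checks and per-type branches of the old code. A tiny runnable sketch with made-up setting values:

result = {'autovacuum_vacuum_cost_delay': '20', 'autovacuum_enabled': None}
rows = [{'name': 'autovacuum_vacuum_cost_delay'}, {'name': 'autovacuum_enabled'}]

for row in rows:
    row_name = row['name']
    if result.get(row_name) is not None:
        # Parse as float, then narrow to int when the value is whole.
        value = float(result[row_name])
        row['value'] = int(value) if value % 1 == 0 else value
    else:
        row.pop('value', None)  # no KeyError even if 'value' was never set

print(rows)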

View File

@ -371,8 +371,8 @@ def execute(argv):
_log('Bye!')
# Let's ignore all the signal coming to us.
def signal_handler(signal, msg):
# Let's ignore all the signal coming to us.
pass

View File

@ -607,6 +607,87 @@ def direct_new(trans_id):
)
def get_debugger_version(conn):
"""
Function returns the debugger version.
:param conn:
:return:
"""
debugger_version = 0
status, rid = conn.execute_scalar(
"SELECT COUNT(*) FROM pg_catalog.pg_proc p"
" LEFT JOIN pg_catalog.pg_namespace n ON p.pronamespace = n.oid"
" WHERE n.nspname = ANY(current_schemas(false)) AND"
" p.proname = 'pldbg_get_proxy_info';"
)
if not status:
return False, internal_server_error(errormsg=rid)
if rid == 0:
debugger_version = 1
status, rid = conn.execute_scalar(
"SELECT proxyapiver FROM pldbg_get_proxy_info();")
if status and rid in (2, 3):
debugger_version = rid
return True, debugger_version
def validate_debug(conn, debug_type, is_superuser):
"""
This function is used to validate the options required for debugger.
:param conn:
:param debug_type:
:param is_superuser:
:return:
"""
if debug_type == 'indirect' and not is_superuser:
# If user is super user then we should check debugger library is
# loaded or not
msg = gettext("You must be a superuser to set a global breakpoint"
" and perform indirect debugging.")
return False, internal_server_error(errormsg=msg)
status, rid_pre = conn.execute_scalar(
"SHOW shared_preload_libraries"
)
if not status:
return False, internal_server_error(
gettext("Could not fetch debugger plugin information.")
)
# Need to check if plugin is really loaded or not with
# "plugin_debugger" string
if debug_type == 'indirect' and "plugin_debugger" not in rid_pre:
msg = gettext(
"The debugger plugin is not enabled. "
"Please add the plugin to the shared_preload_libraries "
"setting in the postgresql.conf file and restart the "
"database server for indirect debugging."
)
current_app.logger.debug(msg)
return False, internal_server_error(msg)
# Check debugger extension version for EPAS 11 and above.
# If it is 1.0 then return error to upgrade the extension.
status, ext_version = conn.execute_scalar(
"SELECT installed_version FROM pg_catalog.pg_available_extensions "
"WHERE name = 'pldbgapi'"
)
if not status:
return False, internal_server_error(errormsg=ext_version)
if conn.manager.server_type == 'ppas' and conn.manager.sversion >= 110000 \
and float(ext_version) < 1.1:
return False, internal_server_error(
errormsg=gettext("Please upgrade the pldbgapi extension "
"to 1.1 or above and try again."))
return True, None
@blueprint.route(
'/initialize_target/<debug_type>/<int:trans_id>/<int:sid>/<int:did>/'
'<int:scid>/<int:func_id>',
@ -644,11 +725,8 @@ def initialize_target(debug_type, trans_id, sid, did,
# Create asynchronous connection using random connection id.
conn_id = str(random.randint(1, 9999999))
try:
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
conn = manager.connection(did=did, conn_id=conn_id)
except Exception as e:
return internal_server_error(errormsg=str(e))
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
conn = manager.connection(did=did, conn_id=conn_id)
# Connect the Server
status, msg = conn.connect()
@ -656,49 +734,9 @@ def initialize_target(debug_type, trans_id, sid, did,
return internal_server_error(errormsg=str(msg))
user = manager.user_info
if debug_type == 'indirect':
# If user is super user then we should check debugger library is
# loaded or not
if not user['is_superuser']:
msg = gettext("You must be a superuser to set a global breakpoint"
" and perform indirect debugging.")
return internal_server_error(errormsg=msg)
else:
status_in, rid_pre = conn.execute_scalar(
"SHOW shared_preload_libraries"
)
if not status_in:
return internal_server_error(
gettext("Could not fetch debugger plugin information.")
)
# Need to check if plugin is really loaded or not with
# "plugin_debugger" string
if "plugin_debugger" not in rid_pre:
msg = gettext(
"The debugger plugin is not enabled. "
"Please add the plugin to the shared_preload_libraries "
"setting in the postgresql.conf file and restart the "
"database server for indirect debugging."
)
current_app.logger.debug(msg)
return internal_server_error(msg)
# Check debugger extension version for EPAS 11 and above.
# If it is 1.0 then return error to upgrade the extension.
if manager.server_type == 'ppas' and manager.sversion >= 110000:
status, ext_version = conn.execute_scalar(
"SELECT installed_version FROM pg_catalog.pg_available_extensions "
"WHERE name = 'pldbgapi'"
)
if not status:
return internal_server_error(errormsg=ext_version)
else:
if float(ext_version) < 1.1:
return internal_server_error(
errormsg=gettext("Please upgrade the pldbgapi extension "
"to 1.1 or above and try again."))
status, error = validate_debug(conn, debug_type, user['is_superuser'])
if not status:
return error
# Set the template path required to read the sql files
template_path = 'debugger/sql'
@ -718,30 +756,10 @@ def initialize_target(debug_type, trans_id, sid, did,
func_id = tr_set['rows'][0]['tgfoid']
status = True
# Find out the debugger version and store it in session variables
status, rid = conn.execute_scalar(
"SELECT COUNT(*) FROM pg_catalog.pg_proc p"
" LEFT JOIN pg_catalog.pg_namespace n ON p.pronamespace = n.oid"
" WHERE n.nspname = ANY(current_schemas(false)) AND"
" p.proname = 'pldbg_get_proxy_info';"
)
status, debugger_version = get_debugger_version(conn)
if not status:
return internal_server_error(errormsg=rid)
else:
if rid == 0:
debugger_version = 1
status, rid = conn.execute_scalar(
"SELECT proxyapiver FROM pldbg_get_proxy_info();")
if status:
if rid == 2 or rid == 3:
debugger_version = rid
else:
status = False
return debugger_version
# Add the debugger version information to pgadmin4 log file
current_app.logger.debug("Debugger version is: %d", debugger_version)
@ -753,9 +771,8 @@ def initialize_target(debug_type, trans_id, sid, did,
# provide the data from another session so below condition will
# be required
if request.method == 'POST':
data = json.loads(request.values['data'], encoding='utf-8')
if data:
de_inst.function_data['args_value'] = data
de_inst.function_data['args_value'] = \
json.loads(request.values['data'], encoding='utf-8')
# Update the debugger data session variable
# Here frame_id is required when user debug the multilevel function.
@ -1143,52 +1160,44 @@ def execute_debugger_query(trans_id, query_type):
conn_id=de_inst.debugger_data['exe_conn_id'])
# find the debugger version and execute the query accordingly
dbg_version = de_inst.debugger_data['debugger_version']
if dbg_version <= 2:
template_path = 'debugger/sql/v1'
else:
template_path = 'debugger/sql/v2'
template_path = 'debugger/sql/v1' \
if de_inst.debugger_data['debugger_version'] <= 2 \
else 'debugger/sql/v2'
if conn.connected():
sql = render_template(
"/".join([template_path, query_type + ".sql"]),
session_id=de_inst.debugger_data['session_id']
)
# As the query type is continue or step_into or step_over then we
# may get result after some time so poll the result.
# We need to update the frame id variable when user move the next
# step for debugging.
if query_type == 'continue' or query_type == 'step_into' or \
query_type == 'step_over':
# We should set the frame_id to 0 when execution starts.
if de_inst.debugger_data['frame_id'] != 0:
de_inst.debugger_data['frame_id'] = 0
de_inst.update_session()
status, result = conn.execute_async(sql)
if not status:
internal_server_error(errormsg=result)
return make_json_response(
data={'status': status, 'result': result}
)
elif query_type == 'abort_target':
status, result = conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=result)
else:
return make_json_response(
info=gettext('Debugging aborted successfully.'),
data={'status': 'Success', 'result': result}
)
else:
status, result = conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=result)
else:
if not conn.connected():
result = gettext('Not connected to server or connection '
'with the server has been closed.')
return internal_server_error(errormsg=result)
sql = render_template(
"/".join([template_path, query_type + ".sql"]),
session_id=de_inst.debugger_data['session_id']
)
# As the query type is continue or step_into or step_over then we
# may get result after some time so poll the result.
# We need to update the frame id variable when user move the next
# step for debugging.
if query_type in ('continue', 'step_into', 'step_over'):
# We should set the frame_id to 0 when execution starts.
de_inst.debugger_data['frame_id'] = 0
de_inst.update_session()
status, result = conn.execute_async(sql)
if not status:
return internal_server_error(errormsg=result)
return make_json_response(
data={'status': status, 'result': result}
)
status, result = conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=result)
if query_type == 'abort_target':
return make_json_response(
info=gettext('Debugging aborted successfully.'),
data={'status': 'Success', 'result': result}
)
return make_json_response(
data={'status': 'Success', 'result': result['rows']}
)
@ -1230,7 +1239,8 @@ def messages(trans_id):
port_number = ''
if conn.connected():
status, result = conn.poll()
status = 'Busy'
_, result = conn.poll()
notify = conn.messages()
if notify:
# In notice message we need to find "PLDBGBREAK" string to find
@ -1240,19 +1250,12 @@ def messages(trans_id):
# From the above message we need to find out port number
# as "7" so below logic will find 7 as port number
# and attach listened to that port number
port_found = False
tmp_list = list(filter(lambda x: 'PLDBGBREAK' in x, notify))
if len(tmp_list) > 0:
port_number = re.search(r'\d+', tmp_list[0])
if port_number is not None:
status = 'Success'
port_number = port_number.group(0)
port_found = True
if not port_found:
status = 'Busy'
else:
status = 'Busy'
return make_json_response(
data={'status': status, 'result': port_number}
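
The reworked execute_debugger_query() above also illustrates the guard-clause style: error paths return early, so the happy path no longer sits inside nested else blocks. A standalone sketch of that structure with a stubbed connection object (not the pgAdmin driver API):

class StubConnection:
    """Minimal stand-in for an asynchronous database connection."""

    def connected(self):
        return True

    def execute_async(self, sql):
        return True, 'query submitted'

    def execute_dict(self, sql):
        return True, {'rows': [{'value': 1}]}


def run_debugger_query(conn, query_type, sql):
    if not conn.connected():
        return {'error': 'Not connected to server or connection '
                         'with the server has been closed.'}
    if query_type in ('continue', 'step_into', 'step_over'):
        status, result = conn.execute_async(sql)
        if not status:
            return {'error': result}
        return {'status': status, 'result': result}
    status, result = conn.execute_dict(sql)
    if not status:
        return {'error': result}
    if query_type == 'abort_target':
        return {'info': 'Debugging aborted successfully.', 'result': result}
    return {'status': 'Success', 'result': result['rows']}


print(run_debugger_query(StubConnection(), 'step_over', 'SELECT 1'))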

View File

@ -19,7 +19,7 @@ from urllib.parse import unquote
from pgadmin.browser.server_groups.servers.utils import parse_priv_to_db
from pgadmin.utils import PgAdminModule
from pgadmin.utils.ajax import make_response as ajax_response, \
make_json_response, internal_server_error
make_json_response, internal_server_error, bad_request
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
@ -144,7 +144,9 @@ def check_precondition(f):
@blueprint.route("/")
@login_required
def index():
pass
return bad_request(
errormsg=gettext("This URL cannot be called directly.")
)
@blueprint.route("/grant_wizard.js")

View File

@ -52,6 +52,7 @@ class PgAdminModule(Blueprint):
self.before_app_first_request(create_module_preference)
def register_preferences(self):
# To be implemented by child classes
pass
def register(self, app, options, first_registration=False):

View File

@ -48,12 +48,15 @@ class BaseFeatureTest(BaseTestGenerator):
raise
def runTest(self):
# To be implemented by child classes
pass
def before(self):
# To be implemented by child classes
pass
def after(self):
# To be implemented by child classes
pass
def tearDown(self):

View File

@ -28,12 +28,15 @@ class SQLTemplateTestBase(BaseTestGenerator):
self.database_name = -1
def test_setup(self, connection, cursor):
# To be implemented by child classes
pass
def generate_sql(self, version):
# To be implemented by child classes
pass
def assertions(self, fetch_result, descriptions):
# To be implemented by child classes
pass
def runTest(self):

View File

@ -410,6 +410,7 @@ class StreamToLogger(object):
self.logger.log(self.log_level, line.rstrip())
def flush(self):
# Function required to be implemented for logger
pass