Mirror of https://github.com/pgadmin-org/pgadmin4.git (synced 2025-02-25 18:55:31 -06:00)

commit 641f7bbe9d
parent a23fad0ba8

Some clumsy coding related fixes reported by SonarQube.
@@ -172,12 +172,7 @@ define('pgadmin.node.language', [
         return res;
       }, disabled: function(m) {
         if (m.isNew()) {
-          if (m.get('template_list').indexOf(m.get('name')) == -1) {
-            return false;
-          }
-          else {
-            return true;
-          }
+          return m.get('template_list').indexOf(m.get('name')) != -1;
         }
         return false;
       },
@@ -202,12 +197,7 @@ define('pgadmin.node.language', [
         return res;
       }, disabled: function(m) {
         if (m.isNew()) {
-          if (m.get('template_list').indexOf(m.get('name')) == -1) {
-            return false;
-          }
-          else {
-            return true;
-          }
+          return m.get('template_list').indexOf(m.get('name')) != -1;
         }
         return false;
       },
@@ -232,12 +222,7 @@ define('pgadmin.node.language', [
         return res;
       }, disabled: function(m) {
         if (m.isNew()) {
-          if (m.get('template_list').indexOf(m.get('name')) == -1) {
-            return false;
-          }
-          else {
-            return true;
-          }
+          return m.get('template_list').indexOf(m.get('name')) != -1;
         }
         return false;
       },
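
The three hunks above make the same SonarQube-flagged simplification: an if/else whose only purpose is to return true or false is replaced by returning the comparison itself. A minimal Python sketch of the same idea (illustrative only; the function and argument names are invented, not pgAdmin code):

# Before: branch only to return a boolean literal.
def uses_template_before(name, template_list):
    if name not in template_list:
        return False
    else:
        return True


# After: return the boolean expression directly.
def uses_template_after(name, template_list):
    return name in template_list


assert uses_template_before('plpgsql', ['plpgsql', 'plperl']) == \
    uses_template_after('plpgsql', ['plpgsql', 'plperl'])
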
@@ -79,9 +79,9 @@ def is_version_in_range(sversion, min_ver, max_ver):
     if min_ver is None and max_ver is None:
         return True
 
-    if min_ver is None or min_ver <= sversion:
-        if max_ver is None or max_ver >= sversion:
-            return True
+    if (min_ver is None or min_ver <= sversion) and \
+            (max_ver is None or max_ver >= sversion):
+        return True
     return False
 
 
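The nested range checks in is_version_in_range() are collapsed into a single condition joined with `and` across a line continuation. A self-contained sketch of the two shapes with a quick equivalence check (the version numbers are arbitrary examples):

def in_range_nested(sversion, min_ver, max_ver):
    # Original shape: one bound checked inside the other.
    if min_ver is None or min_ver <= sversion:
        if max_ver is None or max_ver >= sversion:
            return True
    return False


def in_range_flat(sversion, min_ver, max_ver):
    # Refactored shape: both bounds tested in one combined condition.
    if (min_ver is None or min_ver <= sversion) and \
            (max_ver is None or max_ver >= sversion):
        return True
    return False


for args in [(90600, None, None), (90600, 90500, 110000), (80400, 90500, None)]:
    assert in_range_nested(*args) == in_range_flat(*args)
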
@@ -212,9 +212,8 @@ class PGUtilitiesBackupFeatureTest(BaseFeatureTest):
 
         test_gui_helper.close_process_watcher(self)
 
-        if backup_file is not None:
-            if os.path.isfile(backup_file):
-                os.remove(backup_file)
+        if backup_file is not None and os.path.isfile(backup_file):
+            os.remove(backup_file)
 
     def after(self):
         test_gui_helper.close_process_watcher(self)
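
Here the `None` guard and the `os.path.isfile()` call are merged into one condition. That is safe because `and` short-circuits: the right-hand operand is evaluated only when the left-hand one is true, so `os.path.isfile()` is never handed `None`. A small hedged sketch (the variable value is made up):

import os

backup_file = None  # e.g. no backup file was produced in this run

# os.path.isfile() only runs once the None check has already passed,
# so this single condition cannot raise a TypeError.
if backup_file is not None and os.path.isfile(backup_file):
    os.remove(backup_file)
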
@@ -648,11 +648,11 @@ class Filemanager(object):
                 user_path = u"{0}/".format(user_path)
             else:
                 # filter files based on file_type
-                if file_type is not None and file_type != "*":
-                    if folders_only or len(supported_types) > 0 and \
-                            file_extension not in supported_types or \
-                            file_type != file_extension:
-                        continue
+                if file_type is not None and file_type != "*" and \
+                    (folders_only or len(supported_types) > 0 and
+                     file_extension not in supported_types or
+                        file_type != file_extension):
+                    continue
 
                 # create a list of files and folders
                 files[f] = {
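
In the Filemanager hunk the merged condition mixes `and` with `or`, so the refactor wraps the original inner test in parentheses. Python's `and` binds more tightly than `or`; without the parentheses the new leading `file_type is not None and file_type != "*"` clauses would regroup with only part of the old expression and change which files are skipped. A short demonstration of the precedence rule (values are arbitrary):

a, b, c = False, True, True

# 'and' binds tighter than 'or', so these two expressions are identical...
assert (a and b or c) == ((a and b) or c)

# ...and both differ from grouping the 'or' first, which is what the
# added parentheses in the hunk preserve.
assert (a and b or c) != (a and (b or c))
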
@@ -570,21 +570,21 @@ def direct_new(trans_id):
     user_agent = UserAgent(request.headers.get('User-Agent'))
 
     function_arguments = '('
-    if de_inst.function_data is not None:
-        if 'args_name' in de_inst.function_data and \
+    if de_inst.function_data is not None and \
+            'args_name' in de_inst.function_data and \
             de_inst.function_data['args_name'] is not None and \
             de_inst.function_data['args_name'] != '':
         args_name_list = de_inst.function_data['args_name'].split(",")
         args_type_list = de_inst.function_data['args_type'].split(",")
         index = 0
         for args_name in args_name_list:
             function_arguments = '{}{} {}, '.format(function_arguments,
                                                     args_name,
                                                     args_type_list[index])
             index += 1
         # Remove extra comma and space from the arguments list
         if len(args_name_list) > 0:
             function_arguments = function_arguments[:-2]
 
     function_arguments += ')'
 
@@ -58,11 +58,11 @@ class DebuggerInstance(object):
         return []
 
     def load_from_session(self):
-        if '__debugger_sessions' in session:
-            if str(self.trans_id) in session['__debugger_sessions']:
+        if '__debugger_sessions' in session and \
+                str(self.trans_id) in session['__debugger_sessions']:
             trans_data = session['__debugger_sessions'][str(self.trans_id)]
             self.function_data = trans_data.get('function_data', None)
             self.debugger_data = trans_data.get('debugger_data', None)
 
     def update_session(self):
         with debugger_sessions_lock:
@@ -76,6 +76,6 @@ class DebuggerInstance(object):
 
     def clear(self):
         with debugger_sessions_lock:
-            if '__debugger_sessions' in session:
-                if str(self.trans_id) in session['__debugger_sessions']:
+            if '__debugger_sessions' in session and \
+                    str(self.trans_id) in session['__debugger_sessions']:
                 session['__debugger_sessions'].pop(str(self.trans_id))

@@ -259,32 +259,32 @@ def create_restore_job(sid):
             args.append(default_value)
 
         def set_multiple(key, param, with_schema=True):
-            if key in data:
-                if len(data[key]) > 0:
+            if key in data and \
+                    len(data[key]) > 0:
                 if with_schema:
                     # TODO:// This is temporary
                     # Once object tree is implemented then we will use
                     # list of tuples 'else' part
                     if isinstance(data[key], list):
                         s, t = data[key]
                         args.extend([
                             param,
                             driver.qtIdent(
                                 conn, s
                             ) + '.' + driver.qtIdent(conn, t)
                         ])
                     else:
                         for s, o in data[key]:
                             args.extend([
                                 param,
                                 driver.qtIdent(
                                     conn, s
                                 ) + '.' + driver.qtIdent(conn, o)
                             ])
                 else:
                     for o in data[key]:
                         args.extend([param, o])
                 return True
             return False
 
         args.extend([
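
In set_multiple() the two outer guards (`if key in data:` and `if len(data[key]) > 0:`) become one condition, which removes a level of indentation from the whole helper body. For comparison only, and not what this commit does, the same nesting could also be flattened with an early-return guard clause; a simplified, hypothetical stand-in for the helper:

def set_multiple_sketch(data, args, key, param):
    # Hypothetical, heavily simplified version of the helper in the hunk:
    # bail out early instead of wrapping the body in nested ifs.
    if key not in data or len(data[key]) == 0:
        return False
    for value in data[key]:
        args.extend([param, value])
    return True


args = []
assert set_multiple_sketch({'tables': ['t1', 't2']}, args, 'tables', '--table')
assert args == ['--table', 't1', '--table', 't2']
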
@@ -401,17 +401,17 @@ def poll(trans_id):
 
         # If trans_obj is a QueryToolCommand then check for updatable
         # resultsets and primary keys
-        if isinstance(trans_obj, QueryToolCommand):
-            if trans_obj.check_updatable_results_pkeys_oids():
+        if isinstance(trans_obj, QueryToolCommand) and \
+                trans_obj.check_updatable_results_pkeys_oids():
             pk_names, primary_keys = trans_obj.get_primary_keys()
             session_obj['has_oids'] = trans_obj.has_oids()
             # Update command_obj in session obj
             session_obj['command_obj'] = pickle.dumps(
                 trans_obj, -1)
             # If primary_keys exist, add them to the session_obj to
             # allow for saving any changes to the data
             if primary_keys is not None:
                 session_obj['primary_keys'] = primary_keys
 
         if 'has_oids' in session_obj:
             has_oids = session_obj['has_oids']

@@ -252,9 +252,9 @@ class SQLFilter(object):
         if self._row_filter is None or self._row_filter == '':
             is_filter_applied = False
 
-        if not is_filter_applied:
-            if self._data_sorting and len(self._data_sorting) > 0:
+        if not is_filter_applied and \
+                self._data_sorting and len(self._data_sorting) > 0:
             is_filter_applied = True
 
         return is_filter_applied
 
@@ -323,10 +323,9 @@ class Driver(BaseDriver):
             return False
 
         # If already quoted?, If yes then do not quote again
-        if forTypes and valNoArray:
-            if valNoArray.startswith('"') \
-                    or valNoArray.endswith('"'):
-                return False
+        if forTypes and valNoArray and \
+                (valNoArray.startswith('"') or valNoArray.endswith('"')):
+            return False
 
         if u'0' <= valNoArray[0] <= u'9':
             return True

@@ -539,9 +539,9 @@ WHERE
             self.conn_id.encode('utf-8')
         ), None)
 
-        if self.connected() and cur and not cur.closed:
-            if not server_cursor or (server_cursor and cur.name):
+        if self.connected() and cur and not cur.closed and \
+                (not server_cursor or (server_cursor and cur.name)):
             return True, cur
 
         if not self.connected():
             errmsg = ""
@@ -618,21 +618,21 @@ WHERE
         # We need to esacpe the data so that it does not fail when
         # it is encoded with python ascii
         # unicode_escape helps in escaping and unescaping
-        if self.conn:
-            if self.conn.encoding in ('SQL_ASCII', 'SQLASCII',
+        if self.conn and \
+                self.conn.encoding in ('SQL_ASCII', 'SQLASCII',
                                        'MULE_INTERNAL', 'MULEINTERNAL')\
                 and params is not None and type(params) == dict:
             for key, val in params.items():
                 modified_val = val
                 # "unicode_escape" will convert single backslash to double
                 # backslash, so we will have to replace/revert them again
                 # to store the correct value into the database.
                 if isinstance(val, six.string_types):
                     modified_val = val.encode('unicode_escape')\
                         .decode('raw_unicode_escape')\
                         .replace("\\\\", "\\")
 
                 params[key] = modified_val
 
         return params
 
@@ -1084,13 +1084,12 @@ WHERE
             self.__internal_blocking_execute(cur, query, params)
         except psycopg2.Error as pe:
             cur.close()
-            if not self.connected():
-                if self.auto_reconnect and \
-                        not self.reconnecting:
-                    return self.__attempt_execution_reconnect(
-                        self.execute_2darray, query, params,
-                        formatted_exception_msg
-                    )
+            if not self.connected() and self.auto_reconnect and \
+                    not self.reconnecting:
+                return self.__attempt_execution_reconnect(
+                    self.execute_2darray, query, params,
+                    formatted_exception_msg
+                )
             errmsg = self._formatted_exception_msg(pe, formatted_exception_msg)
             current_app.logger.error(
                 u"Failed to execute query (execute_2darray) for the server "
@@ -1233,9 +1232,8 @@ WHERE
         return False
 
     def reset(self):
-        if self.conn:
-            if self.conn.closed:
-                self.conn = None
+        if self.conn and self.conn.closed:
+            self.conn = None
         pg_conn = None
         manager = self.manager
 
@@ -1463,23 +1461,22 @@ Failed to reset the connection to the server due to following error:
                     pos += 1
 
             self.row_count = cur.rowcount
-            if not no_result:
-                if cur.rowcount > 0:
+            if not no_result and cur.rowcount > 0:
                 result = []
                 # For DDL operation, we may not have result.
                 #
                 # Because - there is not direct way to differentiate DML
                 # and DDL operations, we need to rely on exception to
                 # figure that out at the moment.
                 try:
                     for row in cur:
                         new_row = []
                         for col in self.column_info:
                             new_row.append(row[col['name']])
                         result.append(new_row)
 
                 except psycopg2.ProgrammingError:
                     result = None
 
         return status, result
 
@@ -1726,37 +1723,37 @@ Failed to reset the connection to the server due to following error:
             errmsg += gettext('SQL state: ')
             errmsg += self.decode_to_utf8(exception_obj.diag.sqlstate)
 
-        if exception_obj.diag.message_detail is not None:
-            if 'Detail:'.lower() not in errmsg.lower():
+        if exception_obj.diag.message_detail is not None and \
+                'Detail:'.lower() not in errmsg.lower():
             if not errmsg.endswith('\n'):
                 errmsg += '\n'
             errmsg += gettext('Detail: ')
             errmsg += self.decode_to_utf8(
                 exception_obj.diag.message_detail
             )
 
-        if exception_obj.diag.message_hint is not None:
-            if 'Hint:'.lower() not in errmsg.lower():
+        if exception_obj.diag.message_hint is not None and \
+                'Hint:'.lower() not in errmsg.lower():
             if not errmsg.endswith('\n'):
                 errmsg += '\n'
             errmsg += gettext('Hint: ')
             errmsg += self.decode_to_utf8(exception_obj.diag.message_hint)
 
-        if exception_obj.diag.statement_position is not None:
-            if 'Character:'.lower() not in errmsg.lower():
+        if exception_obj.diag.statement_position is not None and \
+                'Character:'.lower() not in errmsg.lower():
             if not errmsg.endswith('\n'):
                 errmsg += '\n'
             errmsg += gettext('Character: ')
             errmsg += self.decode_to_utf8(
                 exception_obj.diag.statement_position
             )
 
-        if exception_obj.diag.context is not None:
-            if 'Context:'.lower() not in errmsg.lower():
+        if exception_obj.diag.context is not None and \
+                'Context:'.lower() not in errmsg.lower():
             if not errmsg.endswith('\n'):
                 errmsg += '\n'
             errmsg += gettext('Context: ')
             errmsg += self.decode_to_utf8(exception_obj.diag.context)
 
         notices = self.get_notices()
         return errmsg if notices == '' else notices + '\n' + errmsg

@@ -178,9 +178,8 @@ class ServerManager(object):
             if hasattr(str, 'decode') and \
                     not isinstance(database, unicode):
                 database = database.decode('utf-8')
-            if did is not None:
-                if did in self.db_info:
-                    self.db_info[did]['datname'] = database
+            if did is not None and did in self.db_info:
+                self.db_info[did]['datname'] = database
         else:
             if did is None:
                 database = self.db
@@ -274,9 +273,9 @@ WHERE db.oid = {0}""".format(did))
         # first connection for identifications.
         self.pinged = datetime.datetime.now()
         try:
-            if 'password' in data and data['password']:
-                if hasattr(data['password'], 'encode'):
+            if 'password' in data and data['password'] and \
+                    hasattr(data['password'], 'encode'):
                 data['password'] = data['password'].encode('utf-8')
             if 'tunnel_password' in data and data['tunnel_password']:
                 data['tunnel_password'] = \
                     data['tunnel_password'].encode('utf-8')

@@ -141,10 +141,9 @@ class _Preference(object):
             if self.select2 and self.select2['tags']:
                 return res.value
             return self.default
-        if self._type == 'text':
-            if res.value == '' and (self.allow_blanks is None or
-                                    not self.allow_blanks):
-                return self.default
+        if self._type == 'text' and res.value == '' and \
+                (self.allow_blanks is None or not self.allow_blanks):
+            return self.default
         if self._type == 'keyboardshortcut':
             try:
                 return json.loads(res.value)

@@ -114,11 +114,10 @@ class BaseTestGenerator(unittest.TestCase):
         super(BaseTestGenerator, self).setUp()
         self.server_id = self.server_information["server_id"]
         server_con = server_utils.connect_server(self, self.server_id)
-        if hasattr(self, 'skip_on_database'):
-            if 'data' in server_con and 'type' in server_con['data']:
-                if server_con['data']['type'] in self.skip_on_database:
-                    self.skipTest('cannot run in: %s' %
-                                  server_con['data']['type'])
+        if hasattr(self, 'skip_on_database') and \
+                'data' in server_con and 'type' in server_con['data'] and \
+                server_con['data']['type'] in self.skip_on_database:
+            self.skipTest('cannot run in: %s' % server_con['data']['type'])
 
     def setTestServer(self, server):
         self.server = server

@@ -257,11 +257,10 @@ class FileBackedSessionManager(SessionManager):
         current_time = time.time()
         if not session.hmac_digest:
             session.sign(self.secret)
-        elif not session.force_write:
-            if session.last_write is not None and \
-                    (current_time - float(session.last_write)) < \
-                    self.disk_write_delay:
-                return
+        elif not session.force_write and session.last_write is not None and \
+                (current_time - float(session.last_write)) < \
+                self.disk_write_delay:
+            return
 
         session.last_write = current_time
         session.force_write = False
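
The session-write hunk folds a nested `if` into the `elif` condition itself. That preserves behaviour only because the inner `if` had no `else`: whether the extra clauses fail inside the old nested `if` or inside the new combined `elif`, control simply falls through to the code after the chain. A compact equivalence sketch (names and values are invented):

def skip_write_old(unsigned, force, last_write, now, delay):
    if unsigned:
        return False
    elif not force:
        if last_write is not None and (now - last_write) < delay:
            return True
    return False


def skip_write_new(unsigned, force, last_write, now, delay):
    if unsigned:
        return False
    elif not force and last_write is not None and \
            (now - last_write) < delay:
        return True
    return False


for case in [(False, False, 10.0, 12.0, 5.0), (False, False, 1.0, 12.0, 5.0),
             (False, True, 10.0, 12.0, 5.0), (True, False, 10.0, 12.0, 5.0)]:
    assert skip_write_old(*case) == skip_write_new(*case)
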
@@ -402,6 +401,6 @@ def cleanup_session_files():
             current_app.permanent_session_lifetime + \
             datetime.timedelta(days=1)
 
-        if file_expiration_time <= datetime.datetime.now():
-            if os.path.exists(absolute_file_name):
+        if file_expiration_time <= datetime.datetime.now() and \
+                os.path.exists(absolute_file_name):
             os.unlink(absolute_file_name)

@@ -306,12 +306,12 @@ def suggest_based_on_last_token(token, stmt):
                                          require_last_table=True,
                                          local_tables=stmt.local_tables),)
 
-        elif p.token_first().value.lower() == 'select':
-            # If the lparen is preceeded by a space chances are we're about to
-            # do a sub-select.
-            if last_word(stmt.text_before_cursor,
-                         'all_punctuations').startswith('('):
-                return (Keyword(),)
+        # If the lparen is preceeded by a space chances are we're about to
+        # do a sub-select.
+        elif p.token_first().value.lower() == 'select' and \
+                last_word(stmt.text_before_cursor,
+                          'all_punctuations').startswith('('):
+            return (Keyword(),)
         prev_prev_tok = prev_tok and p.token_prev(p.token_index(prev_tok))[1]
         if prev_prev_tok and prev_prev_tok.normalized == 'INTO':
             return (

@@ -78,16 +78,16 @@ pgadmin_credentials = test_setup.config_data
 # Set environment variables for email and password
 os.environ['PGADMIN_SETUP_EMAIL'] = ''
 os.environ['PGADMIN_SETUP_PASSWORD'] = ''
-if pgadmin_credentials:
-    if 'pgAdmin4_login_credentials' in pgadmin_credentials:
-        if all(item in pgadmin_credentials['pgAdmin4_login_credentials']
+if pgadmin_credentials and \
+        'pgAdmin4_login_credentials' in pgadmin_credentials and \
+        all(item in pgadmin_credentials['pgAdmin4_login_credentials']
            for item in ['login_username', 'login_password']):
     pgadmin_credentials = pgadmin_credentials[
         'pgAdmin4_login_credentials']
     os.environ['PGADMIN_SETUP_EMAIL'] = str(pgadmin_credentials[
         'login_username'])
     os.environ['PGADMIN_SETUP_PASSWORD'] = str(pgadmin_credentials[
         'login_password'])
 
 # Execute the setup file
 exec(open("setup.py").read())
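
The regression-test setup now checks the credentials in a single condition, ending with `all()` over a generator expression to confirm that both keys are present before they are read. A tiny standalone illustration of that membership test (the dictionary contents are made up):

creds = {'login_username': 'user@example.com', 'login_password': 'secret'}
required = ['login_username', 'login_password']

# all() with a generator expression: True only if every required key exists.
if creds and all(item in creds for item in required):
    print(creds['login_username'])
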
@@ -248,10 +248,10 @@ def get_test_modules(arguments):
         driver.implicitly_wait(1)
     else:
         options = Options()
-        if test_setup.config_data:
-            if 'headless_chrome' in test_setup.config_data:
-                if test_setup.config_data['headless_chrome']:
+        if test_setup.config_data and \
+                'headless_chrome' in test_setup.config_data and \
+                test_setup.config_data['headless_chrome']:
             options.add_argument("--headless")
         options.add_argument("--no-sandbox")
         options.add_argument("--disable-setuid-sandbox")
         options.add_argument("--window-size=1280,1024")