Fixed cognitive complexity issues reported by SonarQube.

Aditya Toshniwal 2020-08-25 12:39:14 +05:30 committed by Akshay Joshi
parent 86bbc3a9e8
commit 275c671576
6 changed files with 397 additions and 356 deletions

View File

@@ -408,19 +408,21 @@ class EdbFuncView(PGChildNodeView, DataTypeReader):
# Insert a null value for the parameters which do not have
# default values.
if len(proargmodes_fltrd) > len(proargdefaultvals):
dif = len(proargmodes_fltrd) - len(proargdefaultvals)
while (dif > 0):
proargdefaultvals.insert(0, '')
dif -= 1
dif = len(proargmodes_fltrd) - len(proargdefaultvals)
while dif > 0:
proargdefaultvals.insert(0, '')
dif -= 1
def list_get(arr, index, default=''):
return arr[index] if len(arr) > index else default
# Prepare list of Argument list dict to be displayed in the Data Grid.
params = {"arguments": [
self._map_arguments_dict(
i, proargmodes_fltrd[i] if len(proargmodes_fltrd) > i else '',
proargtypes[i] if len(proargtypes) > i else '',
proargnames[i] if len(proargnames) > i else '',
proargdefaultvals[i] if len(proargdefaultvals) > i else ''
i, list_get(proargmodes_fltrd, i),
list_get(proargtypes, i),
list_get(proargnames, i),
list_get(proargdefaultvals, i)
)
for i in range(len(proargtypes))]}
@@ -428,10 +430,10 @@ class EdbFuncView(PGChildNodeView, DataTypeReader):
# panel.
proargs = [self._map_arguments_list(
proargmodes_fltrd[i] if len(proargmodes_fltrd) > i else '',
proargtypes[i] if len(proargtypes) > i else '',
proargnames[i] if len(proargnames) > i else '',
proargdefaultvals[i] if len(proargdefaultvals) > i else ''
list_get(proargmodes_fltrd, i),
list_get(proargtypes, i),
list_get(proargnames, i),
list_get(proargdefaultvals, i)
)
for i in range(len(proargtypes))]
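
Aside: the new list_get helper folds the four repeated bounds-check expressions into one place. A minimal standalone sketch of the pattern (sample data is hypothetical):

def list_get(arr, index, default=''):
    # Return arr[index] when the index exists, otherwise the default.
    return arr[index] if len(arr) > index else default

modes = ['IN', 'OUT']
print(list_get(modes, 0))  # 'IN'
print(list_get(modes, 5))  # '' (out-of-range indexes fall back to the default)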

View File

@@ -14,6 +14,7 @@ from functools import wraps
import simplejson as json
from flask import render_template, request, jsonify
from flask_babelex import gettext
import re
import pgadmin.browser.server_groups.servers.databases as database
from config import PG_DEFAULT_DRIVER
@@ -411,6 +412,65 @@ class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
data['precision'] = str(data['precision'])
return data
def _additional_properties_composite(self, rows):
"""
Used by additional_properties internally for composite type.
:param rows: list of data
:return: formatted response
"""
res = dict()
properties_list = []
# To display in composite collection grid
composite_lst = []
for row in rows:
# We will fetch the full type name
typelist = ' '.join([row['attname'], row['fulltype']])
if (
not row['collname'] or
(
row['collname'] == 'default' and
row['collnspname'] == 'pg_catalog'
)
):
full_collate = ''
collate = ''
else:
full_collate = get_driver(PG_DEFAULT_DRIVER).qtIdent(
self.conn, row['collnspname'], row['collname'])
collate = ' COLLATE ' + full_collate
typelist += collate
properties_list.append(typelist)
is_tlength, is_precision, typeval = \
self.get_length_precision(row.get('elemoid', None))
# Split length, precision from type name for grid
t_len, t_prec = DataTypeReader.parse_length_precision(
row['fulltype'], is_tlength, is_precision)
type_name = DataTypeReader.parse_type_name(row['typname'])
row['type'] = self._cltype_formatter(type_name)
row['hasSqrBracket'] = self.hasSqrBracket
row = self.convert_length_precision_to_string(row)
composite_lst.append({
'attnum': row['attnum'], 'member_name': row['attname'],
'type': type_name,
'collation': full_collate, 'cltype': row['type'],
'tlength': t_len, 'precision': t_prec,
'is_tlength': is_tlength, 'is_precision': is_precision,
'hasSqrBracket': row['hasSqrBracket'],
'fulltype': row['fulltype']})
# Adding both results
res['member_list'] = ', '.join(properties_list)
res['composite'] = composite_lst
return res
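
For reference, a self-contained sketch of the member-list assembly performed above, with hypothetical sample rows and qtIdent simplified to a plain schema.name join:

rows = [
    {'attname': 'id', 'fulltype': 'integer',
     'collname': None, 'collnspname': None},
    {'attname': 'label', 'fulltype': 'text',
     'collname': 'C', 'collnspname': 'pg_catalog'},
]
members = []
for row in rows:
    entry = ' '.join([row['attname'], row['fulltype']])
    # The default collation in pg_catalog is implicit, so it is suppressed.
    if not row['collname'] or (row['collname'] == 'default' and
                               row['collnspname'] == 'pg_catalog'):
        collate = ''
    else:
        collate = ' COLLATE {0}.{1}'.format(row['collnspname'],
                                            row['collname'])
    members.append(entry + collate)
print(', '.join(members))  # id integer, label text COLLATE pg_catalog.C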
def additional_properties(self, copy_dict, tid):
"""
We will use this function to add additional properties according to
@@ -423,93 +483,29 @@ class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
# Fetching type of type
of_type = copy_dict['typtype']
res = dict()
# If type is of Composite then we need to add members list in our
# output
render_args = {'type': of_type}
if of_type == 'c':
render_args['typrelid'] = copy_dict['typrelid']
else:
render_args['tid'] = tid
if of_type in ('c', 'e', 'r'):
SQL = render_template("/".join([self.template_path,
'additional_properties.sql']),
type='c',
typrelid=copy_dict['typrelid'])
**render_args)
status, rset = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=rset)
# If type is of Composite then we need to add members list in our
# output
if of_type == 'c':
# To display in properties
properties_list = []
# To display in composite collection grid
composite_lst = []
for row in rset['rows']:
# We will fetch the full type name
typelist = ' '.join([row['attname'], row['fulltype']])
if (
not row['collname'] or
(
row['collname'] == 'default' and
row['collnspname'] == 'pg_catalog'
)
):
full_collate = ''
collate = ''
else:
full_collate = get_driver(PG_DEFAULT_DRIVER).qtIdent(
self.conn, row['collnspname'], row['collname'])
collate = ' COLLATE ' + full_collate
typelist += collate
properties_list.append(typelist)
is_tlength = False
is_precision = False
if 'elemoid' in row:
is_tlength, is_precision, typeval = \
self.get_length_precision(row['elemoid'])
# The logic below splits length and precision from the
# type name for the grid
import re
t_len = None
t_prec = None
# If we have length & precision both
if is_tlength and is_precision:
match_obj = re.search(r'(\d+),(\d+)', row['fulltype'])
if match_obj:
t_len = match_obj.group(1)
t_prec = match_obj.group(2)
elif is_tlength:
# If we have length only
match_obj = re.search(r'(\d+)', row['fulltype'])
if match_obj:
t_len = match_obj.group(1)
t_prec = None
type_name = DataTypeReader.parse_type_name(row['typname'])
row['type'] = self._cltype_formatter(type_name)
row['hasSqrBracket'] = self.hasSqrBracket
row = self.convert_length_precision_to_string(row)
composite_lst.append({
'attnum': row['attnum'], 'member_name': row['attname'],
'type': type_name,
'collation': full_collate, 'cltype': row['type'],
'tlength': t_len, 'precision': t_prec,
'is_tlength': is_tlength, 'is_precision': is_precision,
'hasSqrBracket': row['hasSqrBracket'],
'fulltype': row['fulltype']})
# Adding both results
res['member_list'] = ', '.join(properties_list)
res['composite'] = composite_lst
res = self._additional_properties_composite(rset['rows'])
# If type is of ENUM then we need to add labels in our output
if of_type == 'e':
SQL = render_template("/".join([self.template_path,
'additional_properties.sql']),
type='e', tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
# To display in properties
properties_list = []
# To display in enum grid
@@ -525,13 +521,7 @@ class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
# If type is of Range then we need to add collation,subtype etc in our
# output
if of_type == 'r':
SQL = render_template("/".join([self.template_path,
'additional_properties.sql']),
type='r', tid=tid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
range_dict = dict(res['rows'][0])
range_dict = dict(rset['rows'][0])
res.update(range_dict)
if 'seclabels' in copy_dict and copy_dict['seclabels'] is not None:
@@ -1246,132 +1236,126 @@ class TypeView(PGChildNodeView, DataTypeReader, SchemaDiffObjectCompare):
return data
def get_sql(self, gid, sid, data, scid, tid=None, is_sql=False):
def _get_new_sql(self, data, is_sql):
"""
This function will generate sql from model data
Used by get_sql internally for new type SQL
:param data: input data
:param is_sql: is sql
:return: generated SQL
"""
if tid is not None:
required_args = [
'name',
'typtype'
]
for key in ['typacl']:
if key in data and data[key] is not None:
if 'added' in data[key]:
data[key]['added'] = parse_priv_to_db(
data[key]['added'], self.acl)
if 'changed' in data[key]:
data[key]['changed'] = parse_priv_to_db(
data[key]['changed'], self.acl)
if 'deleted' in data[key]:
data[key]['deleted'] = parse_priv_to_db(
data[key]['deleted'], self.acl)
if 'composite' in data and len(data['composite']) > 0:
for key in ['added', 'changed', 'deleted']:
if key in data['composite']:
for each_type in data['composite'][key]:
each_type = self. \
convert_length_precision_to_string(each_type)
if 'type' in each_type:
each_type['cltype'] = self._cltype_formatter(
each_type['type'])
each_type['hasSqrBracket'] = self.hasSqrBracket
SQL = render_template(
"/".join([self.template_path,
self._PROPERTIES_SQL]),
scid=scid, tid=tid,
datlastsysoid=self.datlastsysoid,
show_system_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
gettext("Could not find the type in the database.")
)
# Making copy of output for future use
old_data = dict(res['rows'][0])
SQL = render_template("/".join([self.template_path,
self._ACL_SQL]),
scid=scid, tid=tid)
status, acl = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=acl)
# We will get the privileges from the ACL SQL, so we don't
# need them from the properties SQL
old_data['typacl'] = []
for row in acl['rows']:
priv = parse_priv_from_db(row)
if row['deftype'] in old_data:
old_data[row['deftype']].append(priv)
else:
old_data[row['deftype']] = [priv]
# Call the function to add additional properties, if available
old_data.update(self.additional_properties(old_data, tid))
old_data = self._convert_for_sql(old_data)
# If typname or collname is changed while comparing
# two schemas then we need to drop type and recreate it
if 'typtype' in data or 'typname' in data or 'collname' in data\
or 'typinput' in data or 'typoutput' in data:
SQL = render_template(
"/".join([self.template_path, 'type_schema_diff.sql']),
data=data, o_data=old_data, conn=self.conn
)
else:
SQL = render_template(
"/".join([self.template_path, self._UPDATE_SQL]),
data=data, o_data=old_data, conn=self.conn
)
else:
required_args = [
'name',
'typtype'
]
for arg in required_args:
if arg not in data:
return "-- definition incomplete"
for arg in required_args:
if arg not in data:
return "-- definition incomplete"
# Additional checks go here
# If type is range then check if subtype is defined or not
if data and data[arg] == 'r' and \
('typname' not in data or data['typname'] is None):
if data.get(arg, None) == 'r' and \
data.get('typname', None) is None:
return "-- definition incomplete"
# If type is external then check if input/output
# conversion function is defined
if data and data[arg] == 'b' and (
'typinput' not in data or
'typoutput' not in data or
data['typinput'] is None or
data['typoutput'] is None):
if data.get(arg, None) == 'b' and (
data.get('typinput', None) is None or
data.get('typoutput', None) is None):
return "-- definition incomplete"
# Privileges
if 'typacl' in data and data['typacl'] is not None:
data['typacl'] = parse_priv_to_db(data['typacl'], self.acl)
# Privileges
if data.get('typacl', None):
data['typacl'] = parse_priv_to_db(data['typacl'], self.acl)
data = self._convert_for_sql(data)
data = self._convert_for_sql(data)
if 'composite' in data and len(data['composite']) > 0:
for each_type in data['composite']:
each_type = self.convert_length_precision_to_string(
each_type)
if len(data.get('composite', [])) > 0:
for each_type in data['composite']:
each_type = self.convert_length_precision_to_string(
each_type)
each_type['cltype'] = self._cltype_formatter(
each_type['type'])
each_type['hasSqrBracket'] = self.hasSqrBracket
SQL = render_template("/".join([self.template_path,
self._CREATE_SQL]),
data=data, conn=self.conn, is_sql=is_sql)
return SQL, data['name']
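
One subtlety in the data.get rewrite above: the truthiness test is slightly stricter than the old membership-and-not-None check, because empty values are now skipped as well (the desired behaviour for an ACL with no entries). A quick illustration:

data = {'typacl': []}
print('typacl' in data and data['typacl'] is not None)  # True
print(bool(data.get('typacl', None)))                   # False: empties skipped too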
def get_sql(self, gid, sid, data, scid, tid=None, is_sql=False):
"""
This function will generate sql from model data
"""
if tid is None:
return self._get_new_sql(data, is_sql)
for key in ['added', 'changed', 'deleted']:
if key in (data.get('typacl') or {}):
data['typacl'][key] = parse_priv_to_db(
data['typacl'][key], self.acl)
for each_type in data.get('composite', {}).get(key, []):
each_type = self. \
convert_length_precision_to_string(each_type)
if 'type' in each_type:
each_type['cltype'] = self._cltype_formatter(
each_type['type'])
each_type['hasSqrBracket'] = self.hasSqrBracket
SQL = render_template("/".join([self.template_path,
self._CREATE_SQL]),
data=data, conn=self.conn, is_sql=is_sql)
SQL = render_template(
"/".join([self.template_path,
self._PROPERTIES_SQL]),
scid=scid, tid=tid,
datlastsysoid=self.datlastsysoid,
show_system_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(
gettext("Could not find the type in the database.")
)
return SQL, data['name'] if 'name' in data else old_data['name']
# Making copy of output for future use
old_data = dict(res['rows'][0])
SQL = render_template("/".join([self.template_path,
self._ACL_SQL]),
scid=scid, tid=tid)
status, acl = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=acl)
# We will get the privileges from the ACL SQL, so we don't
# need them from the properties SQL
old_data['typacl'] = []
for row in acl['rows']:
priv = parse_priv_from_db(row)
old_data.setdefault(row['deftype'], []).append(priv)
# Call the function to add additional properties, if available
old_data.update(self.additional_properties(old_data, tid))
old_data = self._convert_for_sql(old_data)
# If typname or collname is changed while comparing
# two schemas then we need to drop type and recreate it
render_sql = self._UPDATE_SQL
if any([key in data for key in
['typtype', 'typname', 'collname', 'typinput', 'typoutput']]):
render_sql = 'type_schema_diff.sql'
SQL = render_template(
"/".join([self.template_path, render_sql]),
data=data, o_data=old_data, conn=self.conn
)
return SQL, old_data['name']
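
The overall shape of the refactored get_sql is the main complexity win: an early return for the create path, then a single render call whose template name is chosen up front. A minimal sketch of that control flow, with illustrative names standing in for the real templates:

def render(template, data):
    # Stand-in for render_template; just reports what would be rendered.
    return '-- rendered {0} for {1}'.format(template, data.get('name'))

def get_sql(data, tid=None):
    if tid is None:
        # Create path: nothing to diff against, so exit early.
        return render('create.sql', data)
    template = 'update.sql'
    if any(key in data for key in ('typtype', 'typname', 'collname')):
        # A rename or collation change forces a drop-and-recreate script.
        template = 'type_schema_diff.sql'
    return render(template, data)

print(get_sql({'name': 'ty'}))                      # renders create.sql
print(get_sql({'name': 'ty', 'typname': 'x'}, 1))   # renders type_schema_diff.sql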
@check_precondition
def sql(self, gid, sid, did, scid, tid, **kwargs):

View File

@@ -11,6 +11,7 @@
import json
import copy
import re
from flask import render_template
@@ -411,6 +412,30 @@ class DataTypeReader:
return type_name
@classmethod
def parse_length_precision(cls, fulltype, is_tlength, is_precision):
"""
Parse the type string and split length, precision.
:param fulltype: type string
:param is_tlength: is length type
:param is_precision: is precision type
:return: length, precision
"""
t_len, t_prec = None, None
if is_tlength and is_precision:
match_obj = re.search(r'(\d+),(\d+)', fulltype)
if match_obj:
t_len = match_obj.group(1)
t_prec = match_obj.group(2)
elif is_tlength:
# If we have length only
match_obj = re.search(r'(\d+)', fulltype)
if match_obj:
t_len = match_obj.group(1)
t_prec = None
return t_len, t_prec
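
A short usage sketch of the extracted classmethod, assuming DataTypeReader is importable and given typical rendered type strings:

print(DataTypeReader.parse_length_precision('numeric(10,2)', True, True))
# ('10', '2')
print(DataTypeReader.parse_length_precision('varchar(30)', True, False))
# ('30', None)
print(DataTypeReader.parse_length_precision('integer', False, False))
# (None, None)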
def trigger_definition(data):
"""

View File

@@ -566,21 +566,19 @@ SELECT EXISTS(
:return:
"""
# Format the schedule data. Convert the boolean array
if 'jschedules' in data:
if 'added' in data['jschedules']:
for added_schedule in data['jschedules']['added']:
format_schedule_data(added_schedule)
if 'changed' in data['jschedules']:
for changed_schedule in data['jschedules']['changed']:
format_schedule_data(changed_schedule)
for key in ['added', 'changed']:
jschedules = data.get('jschedules', {})
if key in jschedules:
for schedule in jschedules.get(key, []):
format_schedule_data(schedule)
has_connection_str = self.manager.db_info['pgAgent']['has_connstr']
if 'jsteps' in data and has_connection_str and \
'changed' in data['jsteps']:
for changed_step in data['jsteps']['changed']:
if 'jstconntype' not in changed_step and (
'jstdbname' in changed_step or
'jstconnstr' in changed_step):
jssteps = data.get('jsteps', {})
if 'changed' in jssteps:
for changed_step in jssteps.get('changed', []):
if 'jstconntype' not in changed_step and \
('jstdbname' in changed_step or
'jstconnstr' in changed_step) and has_connection_str:
status, rset = self.conn.execute_dict(
render_template(
"/".join([self.template_path, 'steps.sql']),
@@ -596,11 +594,11 @@ SELECT EXISTS(
row = rset['rows'][0]
changed_step['jstconntype'] = row['jstconntype']
if row['jstconntype']:
if not ('jstdbname' in changed_step):
changed_step['jstdbname'] = row['jstdbname']
changed_step['jstdbname'] = changed_step.get(
'jstdbname', row['jstdbname'])
else:
if not ('jstconnstr' in changed_step):
changed_step['jstconnstr'] = row['jstconnstr']
changed_step['jstconnstr'] = changed_step.get(
'jstconnstr', row['jstconnstr'])
JobView.register_node_view(blueprint)
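
The get-with-fallback rewrite above is a small but reusable idiom: d[k] = d.get(k, default) keeps an existing value and fills in a missing one, replacing the old "if k not in d" guard. A sketch with hypothetical step data:

changed_step = {'jstdbname': 'postgres'}
row = {'jstdbname': 'template1', 'jstconnstr': 'host=localhost'}
# Existing keys are kept; missing keys are filled from the fetched row.
changed_step['jstdbname'] = changed_step.get('jstdbname', row['jstdbname'])
changed_step['jstconnstr'] = changed_step.get('jstconnstr', row['jstconnstr'])
print(changed_step)
# {'jstdbname': 'postgres', 'jstconnstr': 'host=localhost'}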

View File

@@ -254,6 +254,39 @@ def update_status(**kw):
raise ValueError("Please verify pid and db_file arguments.")
def _handle_execute_exception(ex, args, _stderr, exit_code=None):
"""
Used internally by execute to handle exceptions
:param ex: exception object
:param args: execute args dict
:param _stderr: stderr
:param exit_code: exit code override
"""
info = _log_exception()
if _stderr:
_stderr.log(info)
else:
print("WARNING: ", ex.strerror, file=sys.stderr)
args.update({'end_time': get_current_time()})
args.update({
'exit_code': ex.errno if exit_code is None else exit_code})
def _fetch_execute_output(process, _stdout, _stderr):
"""
Used internally by execute to fetch the process output and log it.
:param process: process obj
:param _stdout: stdout
:param _stderr: stderr
"""
data = process.communicate()
if data:
if data[0]:
_stdout.log(data[0])
if data[1]:
_stderr.log(data[1])
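
For context, a runnable sketch of the tuple communicate() hands back to _fetch_execute_output; either element may be empty:

import subprocess
import sys

process = subprocess.Popen(
    [sys.executable, '-c', 'print("hello")'],
    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()  # (stdout bytes, stderr bytes)
if out:
    print('stdout:', out)
if err:
    print('stderr:', err)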
def execute(argv):
"""
This function will execute the background process
@@ -268,7 +301,6 @@ def execute(argv):
# Create separate threads for stdout and stderr
process_stdout = ProcessLogger('out')
process_stderr = ProcessLogger('err')
process = None
try:
# update start_time
@@ -283,7 +315,7 @@ def execute(argv):
update_status(**args)
_log('Status updated...')
if 'PROCID' in os.environ and os.environ['PROCID'] in os.environ:
if os.environ.get(os.environ.get('PROCID', ''), None):
os.environ['PGPASSWORD'] = os.environ[os.environ['PROCID']]
kwargs = dict()
@@ -331,34 +363,14 @@ def execute(argv):
args.update({'end_time': get_current_time()})
# Fetch last output, and error from process if it has missed.
data = process.communicate()
if data:
if data[0]:
process_stdout.log(data[0])
if data[1]:
process_stderr.log(data[1])
_fetch_execute_output(process, process_stdout, process_stderr)
# If executable not found or invalid arguments passed
except OSError as e:
info = _log_exception()
args.update({'exit_code': 500})
if process_stderr:
process_stderr.log(info)
else:
print("WARNING: ", e.strerror, file=sys.stderr)
args.update({'end_time': get_current_time()})
args.update({'exit_code': e.errno})
_handle_execute_exception(e, args, process_stderr, exit_code=None)
# Unknown errors
except Exception as e:
info = _log_exception()
args.update({'exit_code': 501})
if process_stderr:
process_stderr.log(info)
else:
print("WARNING: ", str(e), file=sys.stderr)
args.update({'end_time': get_current_time()})
args.update({'exit_code': -1})
_handle_execute_exception(e, args, process_stderr, exit_code=-1)
finally:
# Update the execution end_time, and exit-code.
update_status(**args)
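
The two except blocks now differ only in the exit code they pass, which is what made the shared handler possible. A self-contained sketch of the pattern (names simplified from the helpers above):

import time

def get_current_time():
    return time.strftime('%Y-%m-%d %H:%M:%S')

def handle(ex, args, exit_code=None):
    # Mirror _handle_execute_exception: stamp the end time, record the code.
    args['end_time'] = get_current_time()
    args['exit_code'] = getattr(ex, 'errno', None) if exit_code is None \
        else exit_code

args = {}
try:
    raise OSError(2, 'No such file or directory')
except OSError as e:
    handle(e, args)                 # keep the OS error's errno
except Exception as e:
    handle(e, args, exit_code=-1)   # unknown failure
print(args)  # exit_code is 2, taken from the OSError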

View File

@@ -11,6 +11,8 @@
import simplejson as json
import os
import functools
import operator
from flask import render_template, request, current_app, \
url_for, Response
@@ -269,6 +271,125 @@ def filename_with_file_manager_path(_file, create_file=True):
return short_filepath()
def _get_args_params_values(data, conn, backup_obj_type, backup_file, server,
manager):
"""
Used internally by create_backup_objects_job. This function will create
the required args and params for the job.
:param data: input data
:param conn: connection obj
:param backup_obj_type: object type
:param backup_file: file name
:param server: server obj
:param manager: connection manager
:return: args array
"""
from pgadmin.utils.driver import get_driver
driver = get_driver(PG_DEFAULT_DRIVER)
host, port = (manager.local_bind_host, str(manager.local_bind_port)) \
if manager.use_ssh_tunnel else (server.host, str(server.port))
args = [
'--file',
backup_file,
'--host',
host,
'--port',
port,
'--username',
server.username,
'--no-password'
]
def set_param(key, param, assertion=True):
if not assertion:
return
if data.get(key, None):
args.append(param)
def set_value(key, param, default_value=None, assertion=True):
if not assertion:
return
val = data.get(key, default_value)
if val:
args.append(param)
args.append(val)
if backup_obj_type != 'objects':
args.append('--database')
args.append(server.maintenance_db)
if backup_obj_type == 'globals':
args.append('--globals-only')
set_param('verbose', '--verbose')
set_param('dqoute', '--quote-all-identifiers')
set_value('role', '--role')
if backup_obj_type == 'objects' and data.get('format', None):
args.extend(['--format={0}'.format({
'custom': 'c',
'tar': 't',
'plain': 'p',
'directory': 'd'
}[data['format']])])
set_param('blobs', '--blobs', data['format'] in ['custom', 'tar'])
set_value('ratio', '--compress', None,
data.get('format', '') in ['custom', 'plain', 'directory'])
set_param('only_data', '--data-only',
data.get('only_data', None))
set_param('disable_trigger', '--disable-triggers',
data.get('only_data', None) and
data.get('format', '') == 'plain')
set_param('only_schema', '--schema-only',
data.get('only_schema', None) and
not data.get('only_data', None))
set_param('dns_owner', '--no-owner')
set_param('include_create_database', '--create')
set_param('include_drop_database', '--clean')
set_param('pre_data', '--section=pre-data')
set_param('data', '--section=data')
set_param('post_data', '--section=post-data')
set_param('dns_privilege', '--no-privileges')
set_param('dns_tablespace', '--no-tablespaces')
set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
set_param('use_insert_commands', '--inserts')
set_param('use_column_inserts', '--column-inserts')
set_param('disable_quoting', '--disable-dollar-quoting')
set_param('with_oids', '--oids')
set_param('use_set_session_auth', '--use-set-session-authorization')
set_param('no_comments', '--no-comments', manager.version >= 110000)
set_param('load_via_partition_root', '--load-via-partition-root',
manager.version >= 110000)
set_value('encoding', '--encoding')
set_value('no_of_jobs', '--jobs')
args.extend(
functools.reduce(operator.iconcat, map(
lambda s: ['--schema', r'{0}'.format(driver.qtIdent(conn, s).
replace('"', '\"'))],
data.get('schemas', [])), []
)
)
args.extend(
functools.reduce(operator.iconcat, map(
lambda s_t: ['--table',
r'{0}'.format(driver.qtIdent(conn, s_t[0], s_t[1])
.replace('"', '\"'))],
data.get('tables', [])), []
)
)
return args
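
The closure-based option builder is the core of this extraction: set_param appends bare flags, set_value appends flag/value pairs, and the optional assertion gates an option on external state such as the server version. A runnable sketch with hypothetical request data:

import functools
import operator

data = {'verbose': True, 'role': 'admin', 'encoding': ''}
args = []

def set_param(key, param, assertion=True):
    # Append a bare flag only when the key is present and truthy.
    if assertion and data.get(key):
        args.append(param)

def set_value(key, param, default_value=None, assertion=True):
    # Append a flag/value pair, falling back to a default when given.
    if not assertion:
        return
    val = data.get(key, default_value)
    if val:
        args.extend([param, val])

set_param('verbose', '--verbose')
set_value('role', '--role')
set_value('encoding', '--encoding')  # empty string, so nothing is added
print(args)  # ['--verbose', '--role', 'admin']

# The reduce/iconcat idiom then flattens per-item option pairs into one list:
pairs = [['--schema', 'public'], ['--schema', 'sales']]
print(functools.reduce(operator.iconcat, pairs, []))
# ['--schema', 'public', '--schema', 'sales']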
@blueprint.route(
'/job/<int:sid>', methods=['POST'], endpoint='create_server_job'
)
@@ -287,20 +408,13 @@ def create_backup_objects_job(sid):
Returns:
None
"""
if request.form:
data = json.loads(request.form['data'], encoding='utf-8')
else:
data = json.loads(request.data, encoding='utf-8')
backup_obj_type = 'objects'
if 'type' in data:
backup_obj_type = data['type']
data = json.loads(request.data, encoding='utf-8')
backup_obj_type = data.get('type', 'objects')
try:
if 'format' in data and data['format'] == 'directory':
backup_file = filename_with_file_manager_path(data['file'], False)
else:
backup_file = filename_with_file_manager_path(data['file'])
backup_file = filename_with_file_manager_path(
data['file'], (data.get('format', '') != 'directory'))
except Exception as e:
return bad_request(errormsg=str(e))
@@ -338,112 +452,21 @@ def create_backup_objects_job(sid):
errormsg=ret_val
)
args = [
'--file',
backup_file,
'--host',
manager.local_bind_host if manager.use_ssh_tunnel else server.host,
'--port',
str(manager.local_bind_port) if manager.use_ssh_tunnel
else str(server.port),
'--username',
server.username,
'--no-password'
]
if backup_obj_type != 'objects':
args.append('--database')
args.append(server.maintenance_db)
if backup_obj_type == 'globals':
args.append('--globals-only')
def set_param(key, param):
if key in data and data[key]:
args.append(param)
def set_value(key, param, default_value=None):
if key in data and data[key] is not None and data[key] != '':
args.append(param)
args.append(data[key])
elif default_value is not None:
args.append(param)
args.append(default_value)
set_param('verbose', '--verbose')
set_param('dqoute', '--quote-all-identifiers')
set_value('role', '--role')
if backup_obj_type == 'objects' and \
'format' in data and data['format'] is not None:
if data['format'] == 'custom':
args.extend(['--format=c'])
set_param('blobs', '--blobs')
set_value('ratio', '--compress')
elif data['format'] == 'tar':
args.extend(['--format=t'])
set_param('blobs', '--blobs')
elif data['format'] == 'plain':
args.extend(['--format=p'])
set_value('ratio', '--compress')
elif data['format'] == 'directory':
args.extend(['--format=d'])
set_value('ratio', '--compress')
if 'only_data' in data and data['only_data']:
set_param('only_data', '--data-only')
if 'format' in data and data['format'] == 'plain':
set_param('disable_trigger', '--disable-triggers')
elif 'only_schema' in data and data['only_schema']:
set_param('only_schema', '--schema-only')
set_param('dns_owner', '--no-owner')
set_param('include_create_database', '--create')
set_param('include_drop_database', '--clean')
set_param('pre_data', '--section=pre-data')
set_param('data', '--section=data')
set_param('post_data', '--section=post-data')
set_param('dns_privilege', '--no-privileges')
set_param('dns_tablespace', '--no-tablespaces')
set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
set_param('use_insert_commands', '--inserts')
set_param('use_column_inserts', '--column-inserts')
set_param('disable_quoting', '--disable-dollar-quoting')
set_param('with_oids', '--oids')
set_param('use_set_session_auth', '--use-set-session-authorization')
if manager.version >= 110000:
set_param('no_comments', '--no-comments')
set_param('load_via_partition_root', '--load-via-partition-root')
set_value('encoding', '--encoding')
set_value('no_of_jobs', '--jobs')
if 'schemas' in data:
for s in data['schemas']:
args.extend(['--schema', r'{0}'.format(
driver.qtIdent(conn, s).replace('"', '\"'))])
if 'tables' in data:
for s, t in data['tables']:
args.extend([
'--table', r'{0}'.format(
driver.qtIdent(conn, s, t).replace('"', '\"'))
])
args = _get_args_params_values(
data, conn, backup_obj_type, backup_file, server, manager)
escaped_args = [
escape_dquotes_process_arg(arg) for arg in args
]
try:
bfile = data['file'].encode('utf-8') \
if hasattr(data['file'], 'encode') else data['file']
if backup_obj_type == 'objects':
args.append(data['database'])
escaped_args.append(data['database'])
p = BatchProcess(
desc=BackupMessage(
BACKUP.OBJECT, sid,
data['file'].encode('utf-8') if hasattr(
data['file'], 'encode'
) else data['file'],
BACKUP.OBJECT, sid, bfile,
*args,
database=data['database']
),
@@ -454,10 +477,7 @@ def create_backup_objects_job(sid):
desc=BackupMessage(
BACKUP.SERVER if backup_obj_type != 'globals'
else BACKUP.GLOBALS,
sid,
data['file'].encode('utf-8') if hasattr(
data['file'], 'encode'
) else data['file'],
sid, bfile,
*args
),
cmd=utility, args=escaped_args