2016-05-15 05:29:32 -05:00
|
|
|
##########################################################################
|
|
|
|
#
|
|
|
|
# pgAdmin 4 - PostgreSQL Tools
|
|
|
|
#
|
2024-01-01 02:43:48 -06:00
|
|
|
# Copyright (C) 2013 - 2024, The pgAdmin Development Team
|
2016-05-15 05:29:32 -05:00
|
|
|
# This software is released under the PostgreSQL Licence
|
|
|
|
#
|
|
|
|
##########################################################################
|
|
|
|
"""Implements Backup Utility"""
|
|
|
|
|
2023-02-14 23:40:12 -06:00
|
|
|
import json
|
2023-09-13 00:37:28 -05:00
|
|
|
import copy
|
2020-08-25 02:09:14 -05:00
|
|
|
import functools
|
|
|
|
import operator
|
2016-05-15 09:29:57 -05:00
|
|
|
|
2024-06-10 07:34:32 -05:00
|
|
|
from flask import render_template, request, current_app, Response
|
2024-01-25 01:16:55 -06:00
|
|
|
from flask_babel import gettext
|
2024-04-29 03:11:02 -05:00
|
|
|
from pgadmin.user_login_check import pga_login_required
|
2016-05-15 05:29:32 -05:00
|
|
|
from pgadmin.misc.bgprocess.processes import BatchProcess, IProcessDesc
|
2024-06-10 07:34:32 -05:00
|
|
|
from pgadmin.utils import PgAdminModule, does_utility_exist, get_server, \
|
2023-01-13 00:59:21 -06:00
|
|
|
filename_with_file_manager_path
|
|
|
|
from pgadmin.utils.ajax import make_json_response, bad_request, unauthorized
|
2016-05-15 05:29:32 -05:00
|
|
|
|
2016-06-21 08:12:14 -05:00
|
|
|
from config import PG_DEFAULT_DRIVER
|
2024-01-25 06:37:40 -06:00
|
|
|
# This unused import is required as API test cases will fail if we remove it,
|
|
|
|
# Have to identify the cause and then remove it.
|
|
|
|
from pgadmin.model import Server, SharedServer
|
2024-06-11 02:29:48 -05:00
|
|
|
from flask_security import current_user
|
2019-10-10 07:28:32 -05:00
|
|
|
from pgadmin.misc.bgprocess import escape_dquotes_process_arg
|
2024-06-10 07:34:32 -05:00
|
|
|
from pgadmin.utils.constants import MIMETYPE_APP_JS, SERVER_NOT_FOUND
|
|
|
|
from pgadmin.tools.grant_wizard import get_data
|
2016-05-15 05:29:32 -05:00
|
|
|
|
|
|
|
# set template path for sql scripts
MODULE_NAME = 'backup'
# Module-level cache populated by view functions with per-server metadata
# (manager/connection/template path) — see objects() below.
server_info = {}
# Canonical object-type strings as returned by the grant wizard queries;
# used as dict keys when grouping backup objects.
MVIEW_STR = 'materialized view'
FOREIGN_TABLE_STR = 'foreign table'
|
2016-05-15 05:29:32 -05:00
|
|
|
|
|
|
|
|
|
|
|
class BackupModule(PgAdminModule):
    """
    class BackupModule():

    It is a utility which inherits PgAdminModule
    class and define methods to load its own
    javascript file.
    """

    # Human-readable module label shown in the UI.
    LABEL = gettext('Backup')

    def show_system_objects(self):
        """
        return system preference objects
        """
        # NOTE(review): pref_show_system_objects is expected to be set on the
        # module by the preferences framework — not initialised in this file.
        return self.pref_show_system_objects

    def get_exposed_url_endpoints(self):
        """
        Returns:
            list: URL endpoints for backup module
        """
        return ['backup.create_server_job', 'backup.create_object_job',
                'backup.utility_exists', 'backup.objects',
                'backup.schema_objects']
|
2017-06-12 22:17:15 -05:00
|
|
|
|
2016-05-15 05:29:32 -05:00
|
|
|
|
|
|
|
# Create blueprint for BackupModule class
# (instantiated at import time; all @blueprint.route decorators below
# register their views on this object).
blueprint = BackupModule(
    MODULE_NAME, __name__, static_url_path=''
)
|
|
|
|
|
|
|
|
|
2022-11-18 22:43:41 -06:00
|
|
|
class BACKUP():
    """
    Constants defined for Backup utilities
    """
    # Backup of global objects only (roles/tablespaces — pg_dumpall -g).
    GLOBALS = 1
    # Backup of the whole server (pg_dumpall).
    SERVER = 2
    # Backup of a database / schema / table selection (pg_dump).
    OBJECT = 3
|
|
|
|
|
|
|
|
|
|
|
|
class BackupMessage(IProcessDesc):
    """
    BackupMessage(IProcessDesc)

    Defines the message shown for the backup operation.
    """

    def __init__(self, _type, _sid, _bfile, *_args, **_kwargs):
        """
        :param _type: one of the BACKUP.* constants (OBJECT/GLOBALS/SERVER)
        :param _sid: server id
        :param _bfile: backup output file name
        :param _args: command-line arguments for the utility; joined into
            a display string (self.cmd)
        :param _kwargs: optional 'database' key for object-level backups
        """
        self.backup_type = _type
        self.sid = _sid
        self.bfile = _bfile
        self.database = _kwargs['database'] if 'database' in _kwargs else None
        self.cmd = ''
        self.args_str = "{0} ({1}:{2})"

        def cmd_arg(x):
            # Quote a single non-flag argument for display: escape
            # backslashes and double quotes, then wrap in double quotes.
            if x:
                x = x.replace('\\', '\\\\')
                x = x.replace('"', '\\"')
                # NOTE(review): after the previous replace no '""' pair can
                # remain, so this line appears to be dead code — confirm
                # before removing.
                x = x.replace('""', '\\"')
                return ' "' + x + '"'
            return ''

        for arg in _args:
            # Long options (--foo) are appended verbatim; everything else
            # (values, file names) is escaped and quoted.
            if arg and len(arg) >= 2 and arg.startswith('--'):
                self.cmd += ' ' + arg
            else:
                self.cmd += cmd_arg(arg)

    def get_server_name(self):
        """Return "name (host:port)" for this message's server, using the
        SSH tunnel's local bind address when a tunnel is in use."""
        s = get_server(self.sid)

        if s is None:
            return gettext("Not available")

        from pgadmin.utils.driver import get_driver
        driver = get_driver(PG_DEFAULT_DRIVER)
        manager = driver.connection_manager(self.sid)

        host = manager.local_bind_host if manager.use_ssh_tunnel else s.host
        port = manager.local_bind_port if manager.use_ssh_tunnel else s.port

        return "{0} ({1}:{2})".format(s.name, host, port)

    @property
    def type_desc(self):
        # Short, translated description of the backup kind.
        if self.backup_type == BACKUP.OBJECT:
            return gettext("Backing up an object on the server")
        if self.backup_type == BACKUP.GLOBALS:
            return gettext("Backing up the global objects")
        elif self.backup_type == BACKUP.SERVER:
            return gettext("Backing up the server")
        else:
            # It should never reach here.
            return gettext("Unknown Backup")

    @property
    def message(self):
        # Full, translated process description including the server name.
        server_name = self.get_server_name()

        if self.backup_type == BACKUP.OBJECT:
            return gettext(
                "Backing up an object on the server '{0}' "
                "from database '{1}'"
            ).format(server_name, self.database)
        if self.backup_type == BACKUP.GLOBALS:
            return gettext("Backing up the global objects on "
                           "the server '{0}'").format(
                server_name
            )
        elif self.backup_type == BACKUP.SERVER:
            return gettext("Backing up the server '{0}'").format(
                server_name
            )
        else:
            # It should never reach here.
            # NOTE(review): unlike type_desc, this fallback is not wrapped
            # in gettext() — confirm whether that is intentional.
            return "Unknown Backup"

    def details(self, cmd, args):
        """Return the dict consumed by the background-process UI:
        message, full command line, server, object and type labels."""
        server_name = self.get_server_name()
        backup_type = gettext("Backup")
        if self.backup_type == BACKUP.OBJECT:
            backup_type = gettext("Backup Object")
        elif self.backup_type == BACKUP.GLOBALS:
            backup_type = gettext("Backup Globals")
        elif self.backup_type == BACKUP.SERVER:
            backup_type = gettext("Backup Server")

        return {
            "message": self.message,
            # self.cmd already starts with a space / quoted args.
            "cmd": cmd + self.cmd,
            "server": server_name,
            "object": self.database,
            "type": backup_type,
        }
|
2016-05-15 05:29:32 -05:00
|
|
|
|
2018-01-26 10:54:21 -06:00
|
|
|
|
2016-05-15 05:29:32 -05:00
|
|
|
@blueprint.route("/")
@pga_login_required
def index():
    """Reject direct access to the module root URL."""
    return bad_request(
        errormsg=gettext("This URL cannot be called directly.")
    )
|
2016-05-15 05:29:32 -05:00
|
|
|
|
|
|
|
|
|
|
|
@blueprint.route("/backup.js")
@pga_login_required
def script():
    """render own javascript

    Returns the module's client-side script with the translation helper
    exposed to the template.
    """
    return Response(
        response=render_template(
            # BUG FIX: the template was previously passed `_=_`, but this
            # module imports `gettext` (not `_`) from flask_babel, so `_`
            # was unbound at module scope and calling this view raised
            # NameError. Pass the imported gettext under the `_` name the
            # template expects.
            "backup/js/backup.js", _=gettext
        ),
        status=200,
        mimetype=MIMETYPE_APP_JS
    )
|
|
|
|
|
|
|
|
|
2020-08-25 02:09:14 -05:00
|
|
|
def _get_args_params_values(data, conn, backup_obj_type, backup_file, server,
                            manager):
    """
    Used internally by create_backup_objects_job. This function will create
    the required args and params for the job.
    :param data: input data
    :param conn: connection obj
    :param backup_obj_type: object type
    :param backup_file: file name
    :param server: server obj
    :param manager: connection manager
    :return: args array
    """
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)

    # Prefer the SSH tunnel's local bind address when tunnelling.
    host, port = (manager.local_bind_host, str(manager.local_bind_port)) \
        if manager.use_ssh_tunnel else (server.host, str(server.port))
    args = [
        '--file',
        backup_file,
        '--host',
        host,
        '--port',
        port,
        '--username',
        manager.user,
        '--no-password'
    ]

    def set_param(key, param, assertion=True):
        # Append a boolean flag when data[key] is truthy and the version /
        # format assertion holds.
        if not assertion:
            return
        if data.get(key, None):
            args.append(param)

    def set_value(key, param, default_value=None, assertion=True):
        # Append "param value" pairs; list values produce one pair per item.
        if not assertion:
            return
        val = data.get(key, default_value)
        if val:
            if isinstance(val, list):
                for c_val in val:
                    args.append(param)
                    args.append(c_val)
                return
            args.append(param)
            args.append(val)

    # Server/globals backups run pg_dumpall against the maintenance db.
    if backup_obj_type != 'objects':
        args.append('--database')
        args.append(server.maintenance_db)

    if backup_obj_type == 'globals':
        args.append('--globals-only')

    set_value('role', '--role')

    if backup_obj_type == 'objects' and data.get('format', None):
        # Map the UI format name to pg_dump's single-letter format code.
        args.extend(['--format={0}'.format({
            'custom': 'c',
            'tar': 't',
            'plain': 'p',
            'directory': 'd'
        }[data['format']])])

        # --blobs is deprecated from v16
        if manager.version >= 160000:
            set_param('blobs', '--large-objects',
                      data['format'] in ['custom', 'tar'])
        else:
            set_param('blobs', '--blobs', data['format'] in ['custom', 'tar'])
        set_value('ratio', '--compress')

    set_value('encoding', '--encoding')
    set_value('no_of_jobs', '--jobs')

    # Data options
    set_param('only_data', '--data-only',
              data.get('only_data', None))
    # --schema-only and --data-only are mutually exclusive; data wins.
    set_param('only_schema', '--schema-only',
              data.get('only_schema', None) and
              not data.get('only_data', None))
    set_param('only_tablespaces', '--tablespaces-only',
              data.get('only_tablespaces', None))
    set_param('only_roles', '--roles-only',
              data.get('only_roles', None))

    # Sections
    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')

    # Do not Save
    set_param('dns_owner', '--no-owner')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    # Version-gated flags: only emitted when the server supports them.
    set_param('dns_comments', '--no-comments', manager.version >= 110000)
    set_param('dns_publications', '--no-publications',
              manager.version >= 110000)
    set_param('dns_subscriptions', '--no-subscriptions',
              manager.version >= 110000)
    set_param('dns_security_labels', '--no-security-labels',
              manager.version >= 110000)
    set_param('dns_toast_compression', '--no-toast-compression',
              manager.version >= 140000)
    set_param('dns_table_access_method', '--no-table-access-method',
              manager.version >= 150000)
    set_param('dns_no_role_passwords', '--no-role-passwords')

    # Query Options
    set_param('use_insert_commands', '--inserts')
    set_value('max_rows_per_insert', '--rows-per-insert', None,
              manager.version >= 120000)
    set_param('on_conflict_do_nothing', '--on-conflict-do-nothing',
              manager.version >= 120000)
    set_param('include_create_database', '--create')
    set_param('include_drop_database', '--clean')
    set_param('if_exists', '--if-exists')

    # Table options
    set_param('use_column_inserts', '--column-inserts')
    set_param('load_via_partition_root', '--load-via-partition-root',
              manager.version >= 110000)
    set_param('enable_row_security', '--enable-row-security')
    set_value('exclude_table_data', '--exclude-table-data')
    set_value('table_and_children', '--table-and-children', None,
              manager.version >= 160000)
    set_value('exclude_table_and_children', '--exclude-table-and-children',
              None, manager.version >= 160000)
    set_value('exclude_table_data_and_children',
              '--exclude-table-data-and-children', None,
              manager.version >= 160000)
    set_value('exclude_table', '--exclude-table')

    # Disable options
    # --disable-triggers is only valid for data-only plain-text dumps.
    set_param('disable_trigger', '--disable-triggers',
              data.get('only_data', None) and
              data.get('format', '') == 'plain')
    set_param('disable_quoting', '--disable-dollar-quoting')

    # Misc Options
    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_param('use_set_session_auth', '--use-set-session-authorization')
    set_value('exclude_schema', '--exclude-schema')
    set_value('extra_float_digits', '--extra-float-digits', None,
              manager.version >= 120000)
    set_value('lock_wait_timeout', '--lock-wait-timeout')
    set_value('exclude_database', '--exclude-database', None,
              manager.version >= 160000)

    # NOTE(review): '\"' in a non-raw string is just '"', so these
    # .replace('"', '\"') calls are no-ops — confirm intent.
    args.extend(
        functools.reduce(operator.iconcat, map(
            lambda s: ['--schema', r'{0}'.format(driver.qtIdent(conn, s).
                                                 replace('"', '\"'))],
            data.get('schemas', [])), []
        )
    )

    # Each selected table is a (schema, name) pair.
    args.extend(
        functools.reduce(operator.iconcat, map(
            lambda t: ['--table',
                       r'{0}'.format(driver.qtIdent(conn, t[0], t[1])
                                     .replace('"', '\"'))],
            data.get('tables', [])), []
        )
    )

    # Fine-grained object selection from the objects tree: schemas map to
    # --schema, everything else to --table (qualified as schema.name when
    # the entry is a dict).
    if 'objects' in data:
        selected_objects = data.get('objects', {})
        for _key in selected_objects:
            param = 'schema' if _key == 'schema' else 'table'
            args.extend(
                functools.reduce(operator.iconcat, map(
                    lambda s: [f'--{param}',
                               r'{0}.{1}'.format(
                                   driver.qtIdent(conn, s['schema']).replace(
                                       '"', '\"'),
                                   driver.qtIdent(conn, s['name']).replace(
                                       '"', '\"')) if type(
                                   s) is dict else driver.qtIdent(
                                   conn, s).replace('"', '\"')],
                    selected_objects[_key] or []), [])
            )

    return args
|
|
|
|
|
|
|
|
|
2017-06-12 22:17:15 -05:00
|
|
|
@blueprint.route(
    '/job/<int:sid>', methods=['POST'], endpoint='create_server_job'
)
@blueprint.route(
    '/job/<int:sid>/object', methods=['POST'], endpoint='create_object_job'
)
@pga_login_required
def create_backup_objects_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task
        (Backup Database(s)/Schema(s)/Table(s))

    Returns:
        None
    """

    data = json.loads(request.data)
    # 'objects' => pg_dump; anything else ('server'/'globals') => pg_dumpall.
    backup_obj_type = data.get('type', 'objects')

    try:
        # Directory-format dumps point at a directory, so the "create file"
        # check is skipped for them.
        backup_file = filename_with_file_manager_path(
            data['file'], (data.get('format', '') != 'directory'))
    except PermissionError as e:
        return unauthorized(errormsg=str(e))
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = get_server(sid)

    if server is None:
        return make_json_response(
            success=0,
            errormsg=SERVER_NOT_FOUND
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=gettext("Please connect to the server first.")
        )

    # Pick the correct client utility for the backup kind.
    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(
            success=0,
            errormsg=ret_val
        )

    args = _get_args_params_values(
        data, conn, backup_obj_type, backup_file, server, manager)

    escaped_args = [
        escape_dquotes_process_arg(arg) for arg in args
    ]
    try:
        bfile = data['file'].encode('utf-8') \
            if hasattr(data['file'], 'encode') else data['file']
        if backup_obj_type == 'objects':
            # pg_dump takes the database name as the trailing argument.
            args.append(data['database'])
            escaped_args.append(data['database'])
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.OBJECT, server.id, bfile,
                    *args,
                    database=data['database']
                ),
                cmd=utility, args=escaped_args, manager_obj=manager
            )
        else:
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.SERVER if backup_obj_type != 'globals'
                    else BACKUP.GLOBALS,
                    server.id, bfile,
                    *args
                ),
                cmd=utility, args=escaped_args, manager_obj=manager
            )

        # Provide PG environment (password file, SSL settings, ...) to the
        # background process, then launch it.
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'desc': p.desc.message, 'Success': 1}
    )
|
2018-10-22 02:05:21 -05:00
|
|
|
|
|
|
|
|
|
|
|
@blueprint.route(
    '/utility_exists/<int:sid>/<backup_obj_type>', endpoint='utility_exists'
)
@pga_login_required
def check_utility_exists(sid, backup_obj_type):
    """
    This function checks the utility file exist on the given path.

    Args:
        sid: Server ID
        backup_obj_type: Type of the object
    Returns:
        None
    """
    server = get_server(sid)
    if server is None:
        return make_json_response(
            success=0,
            errormsg=SERVER_NOT_FOUND
        )

    from pgadmin.utils.driver import get_driver

    # Resolve the utility path for this server's driver: pg_dump for object
    # backups, pg_dumpall otherwise.
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(server.id)
    utility_key = 'backup' if backup_obj_type == 'objects' \
        else 'backup_server'
    utility = manager.utility(utility_key)

    error_msg = does_utility_exist(utility)
    if error_msg:
        return make_json_response(
            success=0,
            errormsg=error_msg
        )

    return make_json_response(success=1)
|
2023-09-13 00:37:28 -05:00
|
|
|
|
|
|
|
|
|
|
|
@blueprint.route(
    '/objects/<int:sid>/<int:did>', endpoint='objects'
)
@blueprint.route(
    '/objects/<int:sid>/<int:did>/<int:scid>', endpoint='schema_objects'
)
@pga_login_required
def objects(sid, did, scid=None):
    """
    This function returns backup objects

    Args:
        sid: Server ID
        did: database ID
        scid: schema ID
    Returns:
        list of objects
    """
    server = get_server(sid)

    if server is None:
        return make_json_response(
            success=0,
            errormsg=SERVER_NOT_FOUND
        )

    from pgadmin.utils.driver import get_driver
    from pgadmin.utils.ajax import precondition_required

    server_info = {}
    server_info['manager'] = get_driver(PG_DEFAULT_DRIVER) \
        .connection_manager(sid)
    server_info['conn'] = server_info['manager'].connection(
        did=did)
    # If DB not connected then return error to browser
    if not server_info['conn'].connected():
        return precondition_required(
            gettext("Connection to the server has been lost.")
        )

    # Set template path for sql scripts
    server_info['server_type'] = server_info['manager'].server_type
    server_info['version'] = server_info['manager'].version
    if server_info['server_type'] == 'pg':
        server_info['template_path'] = 'grant_wizard/pg/#{0}#'.format(
            server_info['version'])
    elif server_info['server_type'] == 'ppas':
        server_info['template_path'] = 'grant_wizard/ppas/#{0}#'.format(
            server_info['version'])

    # Reuse the grant wizard's object query; scid narrows it to one schema.
    res, _, empty_schema_list = get_data(sid, did, scid,
                                         'schema' if scid else 'database',
                                         server_info, True)

    # Per-schema skeleton of object-type buckets (deep-copied per schema).
    tree_data = {
        'table': [],
        'view': [],
        MVIEW_STR: [],
        FOREIGN_TABLE_STR: [],
        'sequence': []
    }

    schema_group = {}

    for data in res:
        obj_type = data['object_type'].lower()
        if obj_type in ['table', 'view', MVIEW_STR, FOREIGN_TABLE_STR,
                        'sequence']:

            if data['nspname'] not in schema_group:
                schema_group[data['nspname']] = {
                    'id': data['nspname'],
                    'name': data['nspname'],
                    'icon': 'icon-schema',
                    'children': copy.deepcopy(tree_data),
                    'is_schema': True,
                }
            icon_data = {
                MVIEW_STR: 'icon-mview',
                FOREIGN_TABLE_STR: 'icon-foreign_table'
            }
            icon = icon_data[obj_type] if obj_type in icon_data \
                else data['icon']
            schema_group[data['nspname']]['children'][obj_type].append({
                'id': f'{data["nspname"]}_{data["name"]}',
                'name': data['name'],
                'icon': icon,
                'schema': data['nspname'],
                'type': obj_type,
                '_name': '{0}.{1}'.format(data['nspname'], data['name'])
            })

    # Flatten to a list and convert each schema's type buckets into
    # collection nodes for the client-side tree.
    schema_group = [dt for k, dt in schema_group.items()]
    for ch in schema_group:
        children = []
        for obj_type, data in ch['children'].items():
            if data:
                icon_data = {
                    MVIEW_STR: 'icon-coll-mview',
                    FOREIGN_TABLE_STR: 'icon-coll-foreign_table'
                }
                # BUG FIX: the original line ended with a stray trailing
                # comma, which made `icon` a 1-tuple like
                # ('icon-coll-table',) instead of a string, corrupting the
                # icon field sent to the client.
                icon = icon_data[obj_type] if obj_type in icon_data \
                    else f'icon-coll-{obj_type.lower()}'
                children.append({
                    'id': f'{ch["id"]}_{obj_type}',
                    'name': f'{obj_type.title()}s',
                    'icon': icon,
                    'children': data,
                    'type': obj_type,
                    'is_collection': True,
                })

        ch['children'] = children

    # Schemas with no backup-able objects still appear as empty nodes.
    for empty_schema in empty_schema_list:
        schema_group.append({
            'id': empty_schema,
            'name': empty_schema,
            'icon': 'icon-schema',
            'children': [],
            'is_schema': True,
        })
    return make_json_response(
        data=schema_group,
        success=200
    )
|