Added support to download utility files on the client side. Fixes #3318

This commit is contained in:
Rahul Shirsat 2020-10-23 16:14:55 +05:30 committed by Akshay Joshi
parent 7573fac29f
commit c2ad97d0ab
24 changed files with 1842 additions and 61 deletions

View File

@ -9,6 +9,7 @@ This release contains a number of bug fixes and new features since the release o
New features
************
| `Issue #3318 <https://redmine.postgresql.org/issues/3318>`_ - Added support to download utility files at the client-side.
| `Issue #4232 <https://redmine.postgresql.org/issues/4232>`_ - Added tab title placeholder for Query Tool, View/Edit Data, and Debugger.
Housekeeping

View File

@ -22,7 +22,7 @@ from subprocess import Popen, PIPE
import logging
from pgadmin.utils import u_encode, file_quote, fs_encoding, \
get_complete_file_path
get_complete_file_path, get_storage_directory, IS_WIN
import pytz
from dateutil import parser
@ -59,6 +59,48 @@ class IProcessDesc(object, metaclass=ABCMeta):
def details(self, cmd, args):
pass
@property
def current_storage_dir(self):
    """
    Directory (relative to the storage root) containing this process's
    output file, or None when there is nothing to offer for download.

    Only meaningful in server mode: in desktop mode the file lives on
    the user's own machine, and import jobs produce no artefact, so
    both return None.
    """
    if config.SERVER_MODE:
        file = self.bfile
        try:
            # bfile may be stored UTF-8 encoded (bytes); decode when so.
            file = self.bfile.decode('utf-8')
        except Exception:
            # bfile is already a str (or not UTF-8 encoded); use as-is.
            pass

        # Resolve to the full on-disk path when the file is inside the
        # per-user storage directory; otherwise keep the raw name.
        path = get_complete_file_path(file)
        path = file if path is None else path

        if IS_WIN:
            path = os.path.realpath(path)

        # Strip everything up to and including the storage root so only
        # the user-visible relative directory remains.
        storage_directory = os.path.basename(get_storage_directory())
        if storage_directory in path:
            start = path.index(storage_directory)
            end = start + len(storage_directory)
            last_dir = os.path.dirname(path[end:])
        else:
            last_dir = file

        # Normalise path separators for the client; a lone backslash on
        # Windows must be escaped instead of converted.
        if IS_WIN:
            if '\\' in last_dir:
                if len(last_dir) == 1:
                    last_dir = last_dir.replace('\\', '\\\\')
                else:
                    last_dir = last_dir.replace('\\', '/')
        else:
            last_dir = last_dir.replace('\\', '/')

        # Import jobs consume a file rather than producing one.
        return None if hasattr(self, 'is_import') and self.is_import \
            else last_dir

    return None
class BatchProcess(object):
def __init__(self, **kwargs):
@ -543,8 +585,12 @@ class BatchProcess(object):
desc = loads(p.desc)
details = desc
type_desc = ''
current_storage_dir = None
if isinstance(desc, IProcessDesc):
from pgadmin.tools.backup import BackupMessage
from pgadmin.tools.import_export import IEMessage
args = []
args_csv = StringIO(
p.arguments.encode('utf-8')
@ -555,9 +601,11 @@ class BatchProcess(object):
args = args + arg
details = desc.details(p.command, args)
type_desc = desc.type_desc
if isinstance(desc, (BackupMessage, IEMessage)):
current_storage_dir = desc.current_storage_dir
desc = desc.message
return desc, details, type_desc
return desc, details, type_desc, current_storage_dir
@staticmethod
def list():
@ -584,7 +632,8 @@ class BatchProcess(object):
execution_time = BatchProcess.total_seconds(etime - stime)
desc, details, type_desc = BatchProcess._check_process_desc(p)
desc, details, type_desc, current_storage_dir = BatchProcess.\
_check_process_desc(p)
res.append({
'id': p.pid,
@ -596,7 +645,8 @@ class BatchProcess(object):
'exit_code': p.exit_code,
'acknowledge': p.acknowledge,
'execution_time': execution_time,
'process_state': p.process_state
'process_state': p.process_state,
'current_storage_dir': current_storage_dir,
})
if changed:

View File

@ -57,3 +57,7 @@ ol.pg-bg-process-logs {
.pg-bg-bgprocess:hover .bg-close {
opacity: 0.95;
}
.icon-storage-manager:before {
font-icon: url('../img/storage_manager.svg');
}

View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 24.3.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
<g>
<path d="M4.3,5.3h8.8V4c0-0.7-0.6-1.3-1.3-1.3H7.5L5.8,1H1.5C0.8,1,0.2,1.6,0.2,2.3v7.4L2,6.5C2.6,5.8,3.4,5.3,4.3,5.3z"/>
<path d="M7.5,8.9c0-1.2,0.7-2.3,1.6-2.8H4.3C3.7,6.1,3.1,6.4,2.8,7l-1.9,3.3c-0.2,0.4,0.1,1,0.6,1h7C7.8,10.7,7.5,9.8,7.5,8.9z"/>
<path d="M15,6.1h-2.6c1,0.6,1.6,1.6,1.6,2.8c0,0.3-0.1,0.6-0.1,0.9L15.5,7C15.8,6.7,15.5,6.1,15,6.1z"/>
<circle cx="10.8" cy="8.9" r="2.8"/>
</g>
<path d="M14,11.8l-1.2-0.3c-1.3,0.9-2.9,0.8-3.8,0l-1.2,0.3C7,12,6.5,12.7,6.5,13.5v0.7c0,0.4,0.3,0.9,0.9,0.9h2.3l1.1-2.4l1.3,2.4
h2.4c0.4,0,0.9-0.3,0.9-0.9v-0.8C15.2,12.6,14.8,11.9,14,11.8z"/>
</svg>

After

Width:  |  Height:  |  Size: 944 B

View File

@ -20,6 +20,8 @@ define('misc.bgprocess', [
return pgBrowser.BackgroundProcessObsorver;
}
var isServerMode = (function() { return pgAdmin.server_mode == 'True'; })();
var wcDocker = window.wcDocker;
var BGProcess = function(info, notify) {
@ -61,6 +63,7 @@ define('misc.bgprocess', [
curr_status: null,
state: 0, // 0: NOT Started, 1: Started, 2: Finished, 3: Terminated
completed: false,
current_storage_dir: null,
id: info['id'],
type_desc: null,
@ -161,6 +164,9 @@ define('misc.bgprocess', [
if ('process_state' in data)
self.state = data.process_state;
if ('current_storage_dir' in data)
self.current_storage_dir = data.current_storage_dir;
if ('out' in data) {
self.out = data.out && data.out.pos;
@ -325,8 +331,8 @@ define('misc.bgprocess', [
</div>
<div class="pg-bg-etime my-auto mr-2"></div>
<div class="ml-auto">
<button class="btn btn-secondary pg-bg-more-details"><span class="fa fa-info-circle" role="img"></span>&nbsp;` + gettext('More details...') + `</button>
<button class="btn btn-danger bg-process-stop" disabled><span class="fa fa-times-circle" role="img"></span>&nbsp;` + gettext('Stop Process') + `</button>
<button class="btn btn-secondary pg-bg-more-details" title="More Details"><span class="fa fa-info-circle" role="img"></span>&nbsp;` + gettext('More details...') + `</button>
<button class="btn btn-danger bg-process-stop" disabled><span class="fa fa-times-circle" role="img" title="Stop the operation"></span>&nbsp;` + gettext('Stop Process') + `</button>
</div>
</div>
<div class="pg-bg-status py-1">
@ -387,6 +393,7 @@ define('misc.bgprocess', [
var $status_bar = $(self.container.find('.pg-bg-status'));
$status_bar.html(self.curr_status);
var $btn_stop_process = $(self.container.find('.bg-process-stop'));
// Enable Stop Process button only when process is running
if (parseInt(self.state) === 1) {
$btn_stop_process.attr('disabled', false);
@ -415,7 +422,27 @@ define('misc.bgprocess', [
$logs = container.find('.bg-process-watcher'),
$header = container.find('.bg-process-details'),
$footer = container.find('.bg-process-footer'),
$btn_stop_process = container.find('.bg-process-stop');
$btn_stop_process = container.find('.bg-process-stop'),
$btn_storage_manager = container.find('.bg-process-storage-manager');
if(self.current_storage_dir && isServerMode) { //for backup & exports with server mode, operate over storage manager
if($btn_storage_manager.length == 0) {
var str_storage_manager_btn = '<button id="bg-process-storage-manager" class="btn btn-secondary bg-process-storage-manager" title="Click to open file location" aria-label="Storage Manager" tabindex="0" disabled><span class="pg-font-icon icon-storage-manager" role="img"></span></button>&nbsp;';
container.find('.bg-process-details .bg-btn-section').prepend(str_storage_manager_btn);
$btn_storage_manager = container.find('.bg-process-storage-manager');
}
// Disable storage manager button only when process is running
if (parseInt(self.state) === 1) {
$btn_storage_manager.attr('disabled', true);
}
else {
$btn_storage_manager.attr('disabled', false);
}
// On Click event for storage manager button.
$btn_storage_manager.off('click').on('click', self.storage_manager.bind(this));
}
// Enable Stop Process button only when process is running
if (parseInt(self.state) === 1) {
@ -526,6 +553,15 @@ define('misc.bgprocess', [
});
},
storage_manager: function() {
var self = this;
if(self.current_storage_dir) {
pgBrowser.Events.trigger(
'pgadmin:tools:storage_manager', self.current_storage_dir
);
}
},
});
_.extend(
@ -634,7 +670,7 @@ define('misc.bgprocess', [
'<span>' + gettext('Start time') + ': <span class="bgprocess-start-time"></span>' +
'</span>'+
'</div>' +
'<div class="ml-auto">' +
'<div class="ml-auto bg-btn-section">' +
'<button type="button" class="btn btn-danger bg-process-stop" disabled><span class="fa fa-times-circle" role="img"></span>&nbsp;' + gettext('Stop Process') + '</button>' +
'</div>' +
'</div>' +

View File

@ -23,7 +23,7 @@ from werkzeug.exceptions import InternalServerError
import simplejson as json
from flask import render_template, Response, session, request as req, \
url_for, current_app
url_for, current_app, send_from_directory
from flask_babelex import gettext
from flask_security import login_required
from pgadmin.utils import PgAdminModule
@ -1214,11 +1214,16 @@ class Filemanager(object):
'attachment; filename=' + name
return resp
name = path.split('/')[-1]
content = open(orig_path, 'rb')
resp = Response(content)
resp.headers['Content-Disposition'] = 'attachment; filename=' + name
return resp
name = os.path.basename(path)
if orig_path and len(orig_path) > 0:
dir_path = os.path.dirname(orig_path)
else:
dir_path = os.path.dirname(path)
response = send_from_directory(dir_path, name, as_attachment=True)
response.headers["filename"] = name
return response
def permission(self, path=None, req=None):
the_dir = self.dir if self.dir is not None else ''

View File

@ -9,6 +9,7 @@
import './select_dialogue';
import './create_dialogue';
import './storage_dialogue';
define('misc.file_manager', [
'sources/gettext', 'sources/url_for', 'jquery', 'underscore',
@ -41,7 +42,10 @@ define('misc.file_manager', [
let dialogHeight = pgAdmin.Browser.stdH.calc(pgAdmin.Browser.stdH.lg);
if (params.dialog_type == 'create_file') {
Alertify.createModeDlg(params).resizeTo(dialogWidth, dialogHeight);
} else {
} else if(params.dialog_type == 'storage_dialog') {
Alertify.fileStorageDlg(params).resizeTo(dialogWidth, dialogHeight);
}
else {
Alertify.fileSelectionDlg(params).resizeTo(dialogWidth, dialogHeight);
}
},

View File

@ -77,7 +77,8 @@ module.exports = Alertify.dialog('fileSelectionDlg', function() {
$($(self.elements.footer).find('.file_manager_ok')).trigger('click');
});
}, 200);
self.__internal.buttons[1].element.disabled = true;
if(self.__internal.buttons[1])
self.__internal.buttons[1].element.disabled = true;
},
setup: function() {
return {

View File

@ -0,0 +1,45 @@
/////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2020, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////
import gettext from 'sources/gettext';
import Alertify from 'pgadmin.alertifyjs';
// Declare the Storage dialog
module.exports = Alertify.dialog('fileStorageDlg', function() {
// Dialog property
return {
settingUpdated: function(key, oldValue, newValue) {
if(key == 'message') {
this.setMessage(newValue);
}
},
setup: function() {
return {
buttons: [{
text: gettext('Cancel'),
key: 27,
className: 'btn btn-secondary fa fa-times pg-alertify-button',
}],
options: {
closableByDimmer: false,
maximizable: false,
closable: false,
movable: true,
padding: !1,
overflow: !1,
model: 0,
resizable: true,
pinnable: false,
modal: false,
autoReset: false,
},
};
},
};
}, true, 'fileSelectionDlg');

View File

@ -23,6 +23,8 @@ define([
'sources/csrf', 'tablesorter', 'tablesorter-metric',
], function($, _, Alertify, gettext, url_for, Dropzone, pgAdmin, csrf) {
pgAdmin.Browser = pgAdmin.Browser || {};
/*---------------------------------------------------------
Define functions used for various operations
---------------------------------------------------------*/
@ -179,14 +181,17 @@ define([
$('.file_manager').find('button.download').hide();
} else {
$('.file_manager').find('button.download').off().on('click', function() {
var path;
var path,
params = {};
params[pgAdmin.csrf_token_header] = pgAdmin.csrf_token;
if ($('.fileinfo').data('view') == 'grid') {
path = $('.fileinfo li.selected').find('.clip span').attr('data-alt');
window.open(pgAdmin.FileUtils.fileConnector + '?_=' + Date.now() + 'mode=download&path=' + path, '_blank');
} else {
path = $('.fileinfo').find('table#contents tbody tr.selected td:first-child').attr('title');
window.open(pgAdmin.FileUtils.fileConnector + '?_=' + Date.now() + 'mode=download&path=' + path, '_blank');
}
download_file(path);
});
}
};
@ -1030,12 +1035,15 @@ define([
$('.file_manager_ok').removeClass('disabled');
$('.file_manager_ok').attr('disabled', false);
$('.file_manager button.delete, .file_manager button.rename').removeAttr(
$('.file_manager button.delete').removeAttr(
'disabled', 'disabled'
);
$('.file_manager button.download').attr(
'disabled', 'disabled'
);
$('.file_manager button.rename').attr(
'disabled', 'disabled'
);
// set selected folder name in breadcrums
$('.file_manager #uploader .input-path').hide();
$('.file_manager #uploader .show_selected_file').remove();
@ -1078,7 +1086,8 @@ define([
$('.file_manager_ok').removeClass('disabled');
$('.file_manager_ok').attr('disabled', false);
$('.file_manager button.download').attr('disabled', 'disabled');
$('.file_manager button.delete, .file_manager button.rename').removeAttr('disabled');
$('.file_manager button.rename').attr('disabled', 'disabled');
$('.file_manager button.delete').removeAttr('disabled');
// set selected folder name in breadcrums
$('.file_manager #uploader .input-path').hide();
@ -1168,6 +1177,60 @@ define([
is_protected == undefined;
};
// Download the selected file.
// POSTs the path to the file-manager connector with mode=download and
// saves the returned blob, with fallbacks for IE (msSaveBlob) and
// Safari (no a[download] support).
var download_file = function (path) {
  var data = { 'path': path, 'mode': 'download' },
    params = {};
  // Every connector request must carry the CSRF token header.
  params[pgAdmin.csrf_token_header] = pgAdmin.csrf_token;
  $.ajax({
    type: 'POST',
    url: pgAdmin.FileUtils.fileConnector,
    contentType: false,
    headers: params,
    xhrFields: {
      // Receive the response as a Blob rather than text.
      responseType: 'blob',
    },
    cache: false,
    data: JSON.stringify(data),
    success: function (blob, status, xhr) {
      // check for a filename (set by the server in a custom header)
      var filename = xhr.getResponseHeader('filename');
      if (typeof window.navigator.msSaveBlob !== 'undefined') {
        // IE workaround for "HTML7007: One or more blob URLs were revoked by closing the blob for which they were created. These URLs will no longer resolve as the data backing the URL has been freed."
        window.navigator.msSaveBlob(blob, filename);
      } else {
        var URL = window.URL || window.webkitURL;
        var downloadUrl = URL.createObjectURL(blob);
        if (filename) {
          // use HTML5 a[download] attribute to specify filename
          var a = document.createElement('a');
          // safari doesn't support this yet
          if (typeof a.download === 'undefined') {
            window.location.href = downloadUrl;
          } else {
            a.href = downloadUrl;
            a.download = filename;
            document.body.appendChild(a);
            a.click();
          }
        } else {
          // No filename from the server; navigate to the blob URL.
          window.location.href = downloadUrl;
        }
        // Revoke after a short delay so the navigation/click completes.
        setTimeout(function () { URL.revokeObjectURL(downloadUrl); }, 100); // cleanup
      }
    },
    error: function (error) {
      Alertify.error(error);
    },
  });
};
/*---------------------------------------------------------
Initialization - Entry point
---------------------------------------------------------*/
@ -1446,12 +1509,13 @@ define([
) {
$('.file_manager_ok').removeClass('disabled');
$('.file_manager_ok').attr('disabled', false);
$('.file_manager button.delete, .file_manager button.rename').removeAttr(
$('.file_manager button.delete').removeAttr(
'disabled', 'disabled'
);
$('.file_manager button.download').attr(
'disabled', 'disabled'
);
$('.file_manager button.rename').attr('disabled', 'disabled');
// set selected folder name in breadcrums
$('.file_manager #uploader .input-path').hide();
$('.file_manager #uploader .show_selected_file').remove();

View File

@ -141,6 +141,11 @@ class BackupMessage(IProcessDesc):
# It should never reach here.
return _("Unknown Backup")
# @property
# def current_storage_dir(self):
# return self.bfile if os.path.isdir(self.bfile) \
# else os.path.dirname(self.bfile)
@property
def message(self):
name, host, port = self.get_server_details()

View File

@ -14,6 +14,10 @@ from unittest.mock import patch
class BackupMessageTest(BaseTestGenerator):
"""Test the BackupMessage class"""
expected_storage_dir = '/test_path'
pg_dump = "/pg_dump"
scenarios = [
('When Backup server',
dict(
@ -24,7 +28,7 @@ class BackupMessageTest(BaseTestGenerator):
port=5444,
host='localhost',
database='postgres',
bfile='test_restore',
bfile='/test_path/test_restore.sql',
args=[
'--file',
"backup_file",
@ -38,14 +42,15 @@ class BackupMessageTest(BaseTestGenerator):
'--database',
"postgres"
],
cmd="/test_path/pg_dump"
cmd=expected_storage_dir + pg_dump
),
expected_msg="Backing up the server"
" 'test_backup_server (localhost:5444)'",
expected_details_cmd='/test_path/pg_dump --file '
'"backup_file" --host "localhost" '
'--port "5444" --username "postgres" '
'--no-password --database "postgres"'
'--no-password --database "postgres"',
expected_storage_dir=expected_storage_dir
)),
('When Backup global',
@ -57,7 +62,7 @@ class BackupMessageTest(BaseTestGenerator):
port=5444,
host='localhost',
database='postgres',
bfile='test_backup',
bfile='/test_path/test_backup',
args=[
'--file',
'backup_file',
@ -71,14 +76,15 @@ class BackupMessageTest(BaseTestGenerator):
'--database',
'postgres'
],
cmd="/test_path/pg_dump"
cmd=expected_storage_dir + pg_dump
),
expected_msg="Backing up the global objects on the server "
"'test_backup_server (localhost:5444)'",
expected_details_cmd='/test_path/pg_dump --file "backup_file" '
'--host "localhost"'
' --port "5444" --username "postgres" '
'--no-password --database "postgres"'
'--no-password --database "postgres"',
expected_storage_dir=expected_storage_dir
)),
('When backup object',
@ -90,7 +96,7 @@ class BackupMessageTest(BaseTestGenerator):
port=5444,
host='localhost',
database='postgres',
bfile='test_backup',
bfile='/test_path/test_backup',
args=[
'--file',
'backup_file',
@ -104,7 +110,7 @@ class BackupMessageTest(BaseTestGenerator):
'--database',
'postgres'
],
cmd="/test_path/pg_dump"
cmd=expected_storage_dir + pg_dump
),
expected_msg="Backing up an object on the server "
"'test_backup_server (localhost:5444)'"
@ -112,13 +118,15 @@ class BackupMessageTest(BaseTestGenerator):
expected_details_cmd='/test_path/pg_dump --file "backup_file" '
'--host "localhost" '
'--port "5444" --username "postgres" '
'--no-password --database "postgres"'
'--no-password --database "postgres"',
expected_storage_dir=expected_storage_dir
))
]
@patch('pgadmin.utils.get_storage_directory')
@patch('pgadmin.tools.backup.BackupMessage.get_server_details')
def runTest(self, get_server_details_mock):
def runTest(self, get_server_details_mock, get_storage_directory_mock):
get_server_details_mock.return_value = \
self.class_params['name'],\
self.class_params['host'],\
@ -132,10 +140,16 @@ class BackupMessageTest(BaseTestGenerator):
**{'database': self.class_params['database']}
)
get_storage_directory_mock.return_value = '/'
# Check the expected message returned
self.assertEqual(backup_obj.message, self.expected_msg)
# Check the command
obj_details = backup_obj.details(self.class_params['cmd'],
self.class_params['args'])
storage_dir = backup_obj.current_storage_dir
self.assertIn(self.expected_details_cmd, obj_details)
self.assertEqual(self.expected_storage_dir, storage_dir)

View File

@ -23,6 +23,7 @@ from pgadmin.utils.ajax import make_json_response, bad_request
from config import PG_DEFAULT_DRIVER
from pgadmin.model import Server
from pgadmin.utils.constants import MIMETYPE_APP_JS
import config
MODULE_NAME = 'import_export'
@ -71,16 +72,17 @@ class IEMessage(IProcessDesc):
Defines the message shown for the import/export operation.
"""
def __init__(self, _sid, _schema, _tbl, _database, _storage, *_args):
self.sid = _sid
self.schema = _schema
self.table = _tbl
self.database = _database
def __init__(self, *_args, **io_params):
self.sid = io_params['sid']
self.schema = io_params['schema']
self.table = io_params['table']
self.database = io_params['database']
self._cmd = ''
self.is_import = io_params['is_import']
self.bfile = io_params['filename']
if _storage:
_storage = _storage.replace('\\', '/')
if io_params['storage']:
io_params['storage'] = io_params['storage'].replace('\\', '/')
def cmd_arg(x):
if x:
@ -99,19 +101,25 @@ class IEMessage(IProcessDesc):
self._cmd += ' ' + arg
elif replace_next:
arg = cmd_arg(arg)
if _storage is not None:
arg = arg.replace(_storage, '<STORAGE_DIR>')
if io_params['storage'] is not None:
arg = arg.replace(io_params['storage'], '<STORAGE_DIR>')
self._cmd += ' "' + arg + '"'
else:
self._cmd += cmd_arg(arg)
@property
def message(self):
def get_server_details(self):
    """Return (name, host, port) of the server this job targets."""
    # Fetch the server details like hostname, port, roles etc
    s = Server.query.filter_by(
        id=self.sid, user_id=current_user.id
    ).first()
    # NOTE(review): assumes the server row still exists; `s` would be
    # None if it was deleted meanwhile - confirm callers handle that.
    return s.name, s.host, s.port
@property
def message(self):
# Fetch the server details like hostname, port, roles etc
name, host, port = self.get_server_details()
return _(
"Copying table data '{0}.{1}' on database '{2}' "
"and server ({3}:{4})"
@ -119,19 +127,39 @@ class IEMessage(IProcessDesc):
html.safe_str(self.schema),
html.safe_str(self.table),
html.safe_str(self.database),
html.safe_str(s.host),
html.safe_str(s.port)
html.safe_str(host),
html.safe_str(port)
)
@property
def type_desc(self):
return _("Copying table data")
_type_desc = _("Import - ") if self.is_import else _("Export - ")
return _type_desc + _("Copying table data")
# @property
# def current_storage_dir(self):
#
# if config.SERVER_MODE:
# path = os.path.realpath(self.bfile)
# if get_storage_directory() < path:
# storage_directory = os.path.basename(get_storage_directory())
# start = path.index(storage_directory)
# end = start + (len(storage_directory))
#
# last_dir = os.path.dirname(path[end:])
# else:
# last_dir = '\\'
#
# else:
# last_dir = os.path.dirname(self.bfile) \
# if os.path.isfile(self.bfile) \
# else self.bfile
#
# return None if self.is_import else last_dir
def details(self, cmd, args):
# Fetch the server details like hostname, port, roles etc
s = Server.query.filter_by(
id=self.sid, user_id=current_user.id
).first()
name, host, port = self.get_server_details()
res = '<div>'
res += _(
@ -142,9 +170,9 @@ class IEMessage(IProcessDesc):
html.safe_str(self.table),
html.safe_str(self.database),
"{0} ({1}:{2})".format(
html.safe_str(s.name),
html.safe_str(s.host),
html.safe_str(s.port)
html.safe_str(name),
html.safe_str(host),
html.safe_str(port)
)
)
@ -304,8 +332,7 @@ def create_import_export_job(sid):
if not _file:
return bad_request(errormsg=_('Please specify a valid file'))
if IS_WIN:
elif IS_WIN:
_file = _file.replace('\\', '/')
data['filename'] = _file
@ -328,14 +355,22 @@ def create_import_export_job(sid):
args = ['--command', query]
try:
io_params = {
'sid': sid,
'schema': data['schema'],
'table': data['table'],
'database': data['database'],
'is_import': data['is_import'],
'filename': data['filename'],
'storage': storage_dir,
'utility': utility
}
p = BatchProcess(
desc=IEMessage(
sid,
data['schema'],
data['table'],
data['database'],
storage_dir,
utility, *args
*args,
**io_params
),
cmd=utility, args=args
)

View File

@ -0,0 +1,244 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
from pgadmin.misc.bgprocess.processes import BatchProcess, IProcessDesc, \
current_app
from pgadmin.tools.import_export import IEMessage
from pgadmin.utils.route import BaseTestGenerator
from pickle import dumps, loads
from unittest.mock import patch, MagicMock
class BatchProcessTest(BaseTestGenerator):
"""Test the BatchProcess class"""
scenarios = [
('When export file with default options',
dict(
class_params=dict(
sid=1,
name='test_export_server',
port=5444,
host='localhost',
database='postgres',
bfile='test_export',
username='postgres',
args=[
' --command',
'\\copy {0}.{1} ({2},{3}) TO \'{4}\' CSV '
'QUOTE {5} ESCAPE \'\'\'\';'
],
cmd='import_export'
),
params=dict(
filename='test_export_file.csv',
format='csv',
is_import=False,
delimiter="",
quote="\"",
escape="'",
database='postgres',
columns=['test_col_1', 'test_col_2'],
icolumns=[],
schema="export_test_schema",
table="export_test_table",
storage='/'
),
url='/import_export/job/{0}',
expected_cmd_opts=['--command', 'copy', 'TO',
'export_test_schema', 'export_test_table'],
not_expected_cmd_opts=[],
expected_exit_code=[0, None]
)),
('When import file with default options',
dict(
class_params=dict(
sid=1,
name='test_import_server',
port=5444,
host='localhost',
database='postgres',
bfile='test_export',
username='postgres',
args=[
' --command',
'\\copy {0}.{1} ({2},{3}) FROM \'{4}\' CSV '
'QUOTE {5} ESCAPE \'\'\'\';'
],
cmd='import_export'
),
params=dict(
filename='test_import_file.csv',
format='csv',
is_import=True,
delimiter="",
quote="\"",
escape="'",
database='postgres',
columns=['test_col_1', 'test_col_2'],
icolumns=[],
schema="import_test_schema",
table="import_test_table",
storage='/'
),
url='/import_export/job/{0}',
expected_cmd_opts=['--command', 'copy', 'FROM',
'import_test_schema', 'import_test_table'],
not_expected_cmd_opts=[],
expected_exit_code=[0, None]
))
]
@patch('pgadmin.tools.import_export.IEMessage.get_server_details')
@patch('pgadmin.misc.bgprocess.processes.Popen')
@patch('pgadmin.misc.bgprocess.processes.db')
@patch('pgadmin.tools.import_export.current_user')
@patch('pgadmin.misc.bgprocess.processes.current_user')
def runTest(self, current_user_mock, current_user, db_mock,
popen_mock, get_server_details_mock):
with self.app.app_context():
current_user.id = 1
current_user_mock.id = 1
current_app.PGADMIN_RUNTIME = False
def db_session_add_mock(j):
cmd_obj = loads(j.desc)
self.assertTrue(isinstance(cmd_obj, IProcessDesc))
self.assertEqual(cmd_obj.bfile, self.params['filename'])
self.assertEqual(cmd_obj.database,
self.class_params['database'])
command = ' "' + self.class_params['args'][0] + '"' + \
' "' + '\\' + self.class_params['args'][1].format(
self.params['schema'],
self.params['table'],
self.params['columns'][0],
self.params['columns'][1],
self.params['filename'],
'\\' + self.params['quote']
) + '"'
self.assertEqual(cmd_obj._cmd, command)
db_mock.session.add.side_effect = db_session_add_mock
db_mock.session.commit = MagicMock(return_value=True)
get_server_details_mock.return_value = \
self.class_params['name'], \
self.class_params['host'], \
self.class_params['port']
args = self.class_params['args'][1].format(
self.params['schema'],
self.params['table'],
self.params['columns'][0],
self.params['columns'][1],
self.params['filename'],
self.params['quote']
)
import_export_obj = IEMessage(
*[self.class_params['args'][0], args],
**{
'sid': self.class_params['sid'],
'schema': self.params['schema'],
'table': self.params['table'],
'is_import': self.params['is_import'],
'database': self.params['database'],
'filename': self.params['filename'],
'storage': self.params['storage'],
}
)
p = BatchProcess(
desc=import_export_obj,
cmd=self.class_params['cmd'],
args=args
)
# Check that _create_process has been called
self.assertTrue(db_mock.session.add.called)
# Check start method
self._check_start(popen_mock, p, import_export_obj)
# Check list method
self._check_list(p, import_export_obj)
@patch('pgadmin.misc.bgprocess.processes.Process')
def _check_start(self, popen_mock, p, import_export_obj, process_mock):
class TestMockProcess():
def __init__(self, desc, args, cmd):
self.pid = 1
self.exit_code = 1
self.start_time = '2018-04-17 06:18:56.315445 +0000'
self.end_time = None
self.desc = dumps(desc)
self.arguments = " ".join(args)
self.command = cmd
self.acknowledge = None
self.process_state = 0
mock_result = process_mock.query.filter_by.return_value
mock_result.first.return_value = TestMockProcess(
import_export_obj, self.class_params['args'],
self.class_params['cmd'])
cmd_test = self.class_params['cmd']
assert_true = self.assertTrue
class PopenMockSideEffect():
def __init__(self, cmd, **kwargs):
assert_true(cmd_test in cmd)
assert_true('env' in kwargs)
# Need not to call the actual poll, so passing.
def poll(self):
pass
popen_mock.side_effect = PopenMockSideEffect
p.start()
self.assertTrue(popen_mock.called)
@patch('os.path.realpath')
@patch('pgadmin.misc.bgprocess.processes.get_storage_directory')
@patch('pgadmin.misc.bgprocess.processes.get_complete_file_path')
@patch('pgadmin.misc.bgprocess.processes.Process')
@patch('pgadmin.misc.bgprocess.processes.BatchProcess.'
'update_process_info')
def _check_list(self, p, import_export_obj, update_process_info_mock,
process_mock,
get_storage_directory_mock, get_complete_file_path_mock,
realpath_mock):
class TestMockProcess():
def __init__(self, desc, args, cmd):
self.pid = 1
self.exit_code = 1
self.start_time = '2018-04-17 06:18:56.315445 +0000'
self.end_time = None
self.desc = dumps(desc)
self.arguments = " ".join(args)
self.command = cmd
self.acknowledge = None
self.process_state = 0
process_mock.query.filter_by.return_value = [
TestMockProcess(import_export_obj,
self.class_params['args'],
self.class_params['cmd'])]
update_process_info_mock.return_value = [True, True]
get_complete_file_path_mock.return_value = self.params['filename']
realpath_mock.return_value = self.params['filename']
get_storage_directory_mock.return_value = '//'
ret_value = p.list()
self.assertEqual(1, len(ret_value))
self.assertTrue('details' in ret_value[0])
self.assertTrue('desc' in ret_value[0])

View File

@ -0,0 +1,149 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import os
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
import pgadmin.tools.import_export.tests.test_import_export_utils \
as import_export_utils
from pgadmin.utils import does_utility_exist
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
class ExportJobTest(BaseTestGenerator):
"""Export api test cases"""
import_export_url = '/import_export/job/{0}'
scenarios = [
('When exporting a table with the default options',
dict(
params=dict(
filename='test_import_export',
format='csv',
is_import=False,
delimiter="",
quote="\"",
escape="'",
database='',
columns=[],
icolumns=[],
schema="",
table=""
),
url=import_export_url,
expected_params=dict(
expected_cmd_opts=['--command', 'copy', 'TO'],
not_expected_cmd_opts=[],
expected_exit_code=[0, None]
)
)),
('When exporting a table with binary, encoding, delimiter, quote',
dict(
params=dict(
filename='test_import_export_bin',
format='binary',
is_import=False,
encoding="LATIN1",
delimiter="|",
quote="'",
escape="'",
database='',
columns=[],
icolumns=[],
schema="",
table=""
),
url=import_export_url,
expected_params=dict(
expected_cmd_opts=['--command', 'copy', 'TO'],
not_expected_cmd_opts=[],
expected_exit_code=[0, None]
)
)),
('When exporting a table with text, encoding, delimiter, quote',
dict(
params=dict(
filename='test_import_export_text',
format='text',
is_import=False,
encoding="ISO_8859_5",
delimiter="[tab]",
quote="\"",
escape="'",
database='',
columns=[],
icolumns=[],
schema="",
table=""
),
url=import_export_url,
expected_params=dict(
expected_cmd_opts=['--command', 'copy', 'TO'],
not_expected_cmd_opts=[],
expected_exit_code=[0, None]
)
))
]
def setUp(self):
import_export_utils.setup_export_data(self)
self.params['database'] = self.db_name
self.params['schema'] = self.schema_name
self.params['table'] = self.table_name
self.params['columns'] = [self.column_name, self.column_name_1]
if 'default_binary_paths' not in self.server or \
self.server['default_binary_paths'] is None or \
self.server['type'] not in self.server['default_binary_paths'] or\
self.server['default_binary_paths'][self.server['type']] == '':
self.skipTest(
"default_binary_paths is not set for the server {0}".format(
self.server['name']
)
)
bin_p = self.server['default_binary_paths'][self.server['type']]
binary_path = os.path.join(bin_p, 'psql')
if os.name == 'nt':
binary_path = binary_path + '.exe'
ret_val = does_utility_exist(binary_path)
if ret_val is not None:
self.skipTest(ret_val)
def runTest(self):
self.server_id = parent_node_dict["server"][-1]["server_id"]
url = self.url.format(self.server_id)
# Create the import/export job
job_id = import_export_utils.create_import_export_job(self.tester,
url,
self.params,
self.assertEqual)
export_file = import_export_utils\
.run_import_export_job(self.tester, job_id, self.expected_params,
self.assertIn,
self.assertNotIn,
self.assertEqual
)
if export_file is not None and os.path.isfile(export_file):
os.remove(export_file)
    def tearDown(self):
        # Disconnect from the test database that setUp() connected to.
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -0,0 +1,241 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import os
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
import pgadmin.tools.import_export.tests.test_import_export_utils \
as import_export_utils
from pgadmin.utils import does_utility_exist
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.tools.import_export.tests import \
test_import_export_utils as io_utils
class ImportJobTest(BaseTestGenerator):
    """Import api test cases.

    Each scenario first runs an export job (``export_options``) to
    produce a source file, then runs the import job under test against
    that file and checks the generated ``\\copy ... FROM`` command.
    """

    # Background-process endpoint; formatted with the server id at runtime.
    import_export_url = '/import_export/job/{0}'

    scenarios = [
        ('When importing a table with the default options',
         dict(
             params=dict(
                 filename='test_import_export',
                 format='csv',
                 is_import=True,
                 delimiter="",
                 quote="\"",
                 escape="'",
                 database='',
                 columns=[],
                 icolumns=[],
                 schema="",
                 table=""
             ),
             url=import_export_url,
             expected_params=dict(
                 expected_cmd_opts=['--command', 'copy', 'FROM'],
                 not_expected_cmd_opts=[],
                 expected_exit_code=[0, None]
             ),
             # Export run first to create the file the import reads.
             export_options=dict(
                 params=dict(
                     filename='test_import_export',
                     format='csv',
                     is_import=False,
                     delimiter="",
                     quote="\"",
                     escape="'",
                     database='',
                     columns=[],
                     icolumns=[],
                     schema="",
                     table=""
                 ),
                 url=import_export_url,
                 expected_params=dict(
                     expected_cmd_opts=['--command', 'copy', 'TO'],
                     not_expected_cmd_opts=[],
                     expected_exit_code=[0, None]
                 )
             )
         )),
        ('When importing a table with binary, encoding, delimiter, quote',
         dict(
             params=dict(
                 filename='test_import_export_bin',
                 format='binary',
                 is_import=True,
                 delimiter="",
                 quote="\"",
                 escape="'",
                 database='',
                 columns=[],
                 icolumns=[],
                 schema="",
                 table=""
             ),
             url=import_export_url,
             expected_params=dict(
                 expected_cmd_opts=['--command', 'copy', 'FROM'],
                 not_expected_cmd_opts=[],
                 expected_exit_code=[0, None]
             ),
             export_options=dict(
                 params=dict(
                     filename='test_import_export_bin',
                     format='binary',
                     is_import=False,
                     encoding="LATIN1",
                     delimiter="|",
                     quote="'",
                     escape="'",
                     database='',
                     columns=[],
                     icolumns=[],
                     schema="",
                     table=""
                 ),
                 url=import_export_url,
                 expected_params=dict(
                     expected_cmd_opts=['--command', 'copy', 'TO'],
                     not_expected_cmd_opts=[],
                     expected_exit_code=[0, None]
                 )
             )
         )),
        ('When importing a table with text, encoding, delimiter, quote',
         dict(
             params=dict(
                 filename='test_import_export_text',
                 format='text',
                 is_import=True,
                 encoding="ISO_8859_5",
                 delimiter="[tab]",
                 quote="\"",
                 escape="'",
                 database='',
                 columns=[],
                 icolumns=[],
                 schema="",
                 table=""
             ),
             url=import_export_url,
             expected_params=dict(
                 expected_cmd_opts=['--command', 'copy', 'FROM'],
                 not_expected_cmd_opts=[],
                 expected_exit_code=[0, None]
             ),
             export_options=dict(
                 params=dict(
                     filename='test_import_export_text',
                     format='text',
                     is_import=False,
                     encoding="ISO_8859_5",
                     delimiter="[tab]",
                     quote="'",
                     escape="'",
                     database='',
                     columns=[],
                     icolumns=[],
                     schema="",
                     table=""
                 ),
                 url=import_export_url,
                 expected_params=dict(
                     expected_cmd_opts=['--command', 'copy', 'TO'],
                     not_expected_cmd_opts=[],
                     expected_exit_code=[0, None]
                 )
             )
         ))
    ]

    def setUp(self):
        # Create schema/table/columns and point both the import payload
        # and the preparatory export payload at the created objects.
        import_export_utils.setup_export_data(self)
        self.export_options['params']['database'] = self.db_name
        self.export_options['params']['schema'] = self.schema_name
        self.export_options['params']['table'] = self.table_name
        self.export_options['params']['columns'] = [self.column_name,
                                                    self.column_name_1]
        self.params['database'] = self.db_name
        self.params['schema'] = self.schema_name
        self.params['table'] = self.table_name
        self.params['columns'] = [self.column_name, self.column_name_1]
        # Skip when no psql binary path is configured for this server.
        if 'default_binary_paths' not in self.server or \
            self.server['default_binary_paths'] is None or \
            self.server['type'] not in \
            self.server['default_binary_paths'] or \
                self.server['default_binary_paths'][self.server['type']] == '':
            self.skipTest(
                "default_binary_paths is not set for the server {0}".format(
                    self.server['name']
                )
            )
        bin_p = self.server['default_binary_paths'][self.server['type']]
        binary_path = os.path.join(bin_p, 'psql')
        if os.name == 'nt':
            binary_path = binary_path + '.exe'
        ret_val = does_utility_exist(binary_path)
        if ret_val is not None:
            self.skipTest(ret_val)

    def create_export(self):
        # Run the preparatory export job; its output file path is kept
        # on self.export_file (the import scenario reads the same file).
        url = self.export_options['url'].format(self.server_id)
        job_id = io_utils.create_import_export_job(self.tester, url,
                                                   self.export_options[
                                                       'params'],
                                                   self.assertEqual)
        self.export_file = io_utils.run_import_export_job(
            self.tester,
            job_id,
            self.export_options['expected_params'],
            self.assertIn,
            self.assertNotIn,
            self.assertEqual
        )

    def runTest(self):
        self.server_id = parent_node_dict["server"][-1]["server_id"]
        url = self.url.format(self.server_id)
        # Produce the source file before importing it back.
        self.create_export()
        # Create the import/export job
        job_id = import_export_utils.create_import_export_job(self.tester,
                                                              url,
                                                              self.params,
                                                              self.assertEqual)
        import_file = import_export_utils\
            .run_import_export_job(self.tester, job_id, self.expected_params,
                                   self.assertIn,
                                   self.assertNotIn,
                                   self.assertEqual
                                   )
        # Clean up the file referenced by the import job, if any.
        if import_file is not None and os.path.isfile(import_file):
            os.remove(import_file)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -0,0 +1,353 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import simplejson as json
import os
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from pgadmin.utils import server_utils as server_utils, does_utility_exist
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from unittest.mock import patch, MagicMock
class IECreateJobTest(BaseTestGenerator):
    """Test the IECreateJob class.

    Mocks out the background-process machinery and asserts that the
    import/export endpoint builds the expected ``\\copy`` command-line
    arguments for each combination of format/encoding/options.
    """

    # Endpoint; formatted with the server id at runtime.
    import_export_url = '/import_export/job/{0}'

    scenarios = [
        ('When export file with default options',
         dict(
             class_params=dict(
                 sid=1,
                 name='test_export_server',
                 port=5444,
                 host='localhost',
                 database='postgres',
                 bfile='test_export',
                 username='postgres'
             ),
             params=dict(
                 filename='test_export_file.csv',
                 format='csv',
                 is_import=False,
                 delimiter="",
                 quote="\"",
                 escape="'",
                 database='postgres',
                 columns=['test_col_1', 'test_col_2'],
                 icolumns=[],
                 schema="export_test_schema",
                 table="export_test_table"
             ),
             url=import_export_url,
             expected_cmd_opts=['--command', 'copy', 'TO',
                                'export_test_schema', 'export_test_table'],
             not_expected_cmd_opts=[],
             expected_exit_code=[0, None]
         )),
        ('When export file with csv file, header, delimiter=tab, '
         'encoding=LATIN1',
         dict(
             class_params=dict(
                 sid=1,
                 name='test_export_server',
                 port=5444,
                 host='localhost',
                 database='postgres',
                 bfile='test_export',
                 username='postgres'
             ),
             params=dict(
                 filename='test_export_file_01',
                 format="csv",
                 encoding="LATIN1",
                 header=True,
                 delimiter="[tab]",
                 quote="'",
                 escape="\"",
                 is_import=False,
                 database='postgres',
                 columns=['test_col_010', 'test_col_011'],
                 icolumns=[],
                 schema="test_schema_01",
                 table="export_test_table_01"
             ),
             url=import_export_url,
             expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_01',
                                'export_test_table_01', 'HEADER', 'DELIMITER',
                                'LATIN1'],
             not_expected_cmd_opts=[],
             expected_exit_code=[0, None]
         )),
        # NOTE(review): this scenario is byte-identical to the previous
        # one — likely an unintended duplicate; confirm and remove.
        ('When export file with csv file, header, delimiter=tab, '
         'encoding=LATIN1',
         dict(
             class_params=dict(
                 sid=1,
                 name='test_export_server',
                 port=5444,
                 host='localhost',
                 database='postgres',
                 bfile='test_export',
                 username='postgres'
             ),
             params=dict(
                 filename='test_export_file_01',
                 format="csv",
                 encoding="LATIN1",
                 header=True,
                 delimiter="[tab]",
                 quote="'",
                 escape="\"",
                 is_import=False,
                 database='postgres',
                 columns=['test_col_010', 'test_col_011'],
                 icolumns=[],
                 schema="test_schema_01",
                 table="export_test_table_01"
             ),
             url=import_export_url,
             expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_01',
                                'export_test_table_01', 'HEADER', 'DELIMITER',
                                'LATIN1'],
             not_expected_cmd_opts=[],
             expected_exit_code=[0, None]
         )),
        ('When export file with binary file, oid, encoding=UTF8',
         dict(
             class_params=dict(
                 sid=1,
                 name='test_export_server',
                 port=5444,
                 host='localhost',
                 database='postgres',
                 bfile='test_export',
                 username='postgres'
             ),
             params=dict(
                 filename='test_export_file_02',
                 format="binary",
                 encoding="UTF8",
                 oid=True,
                 delimiter="",
                 quote="\"",
                 escape="'",
                 is_import=False,
                 database='postgres',
                 columns=['test_col_020', 'test_col_021'],
                 icolumns=[],
                 schema="test_schema_02",
                 table="export_test_table_02"
             ),
             # OID columns were removed in PostgreSQL 12.
             server_max_version=119999,
             skip_msg="OIDs not supported by EPAS/PG 12.0 and above.",
             url=import_export_url,
             expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_02',
                                'export_test_table_02', 'UTF8',
                                'OIDS'],
             not_expected_cmd_opts=[],
             expected_exit_code=[0, None]
         )),
        ('When export file with text file, delimiter=|, encoding=ISO_8859_6',
         dict(
             class_params=dict(
                 sid=1,
                 name='test_export_server',
                 port=5444,
                 host='localhost',
                 database='postgres',
                 bfile='test_export',
                 username='postgres'
             ),
             params=dict(
                 filename='test_export_file_03',
                 format="text",
                 encoding="ISO_8859_6",
                 delimiter="|",
                 quote="\"",
                 escape="'",
                 is_import=False,
                 database='postgres',
                 columns=['test_col_030', 'test_col_031'],
                 icolumns=[],
                 schema="test_schema_03",
                 table="export_test_table_03"
             ),
             url=import_export_url,
             expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_03',
                                'export_test_table_03', 'DELIMITER',
                                'ISO_8859_6'],
             not_expected_cmd_opts=[],
             expected_exit_code=[0, None]
         )),
        ('When export file with binary file, delimiter=tab, '
         'encoding=ISO_8859_6',
         dict(
             class_params=dict(
                 sid=1,
                 name='test_export_server',
                 port=5444,
                 host='localhost',
                 database='postgres',
                 bfile='test_export',
                 username='postgres'
             ),
             params=dict(
                 filename='test_export_file_04',
                 format="binary",
                 encoding="ISO_8859_6",
                 quote="\"",
                 escape="'",
                 is_import=False,
                 database='postgres',
                 columns=['test_col_040', 'test_col_041'],
                 icolumns=[],
                 schema="test_schema_04",
                 table="export_test_table_04"
             ),
             url=import_export_url,
             # Binary format ignores delimiters, so DELIMITER must not
             # appear in the generated command.
             expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_04',
                                'export_test_table_04',
                                'ISO_8859_6'],
             not_expected_cmd_opts=['DELIMITER'],
             expected_exit_code=[0, None]
         )),
        ('When import file with default options',
         dict(
             class_params=dict(
                 sid=1,
                 name='test_export_server',
                 port=5444,
                 host='localhost',
                 database='postgres',
                 bfile='test_export',
                 username='postgres'
             ),
             params=dict(
                 filename='test_import_file.csv',
                 format='csv',
                 is_import=True,
                 delimiter="",
                 quote="\"",
                 escape="'",
                 database='postgres',
                 columns=['test_col_1', 'test_col_2'],
                 icolumns=[],
                 schema="import_test_schema",
                 table="import_test_table"
             ),
             url=import_export_url,
             expected_cmd_opts=['--command', 'copy', 'FROM',
                                'import_test_schema', 'import_test_table'],
             not_expected_cmd_opts=[],
             expected_exit_code=[0, None]
         )),
    ]

    def setUp(self):
        # Skip when no psql binary path is configured for this server.
        if 'default_binary_paths' not in self.server or \
            self.server['default_binary_paths'] is None or \
            self.server['type'] not in self.server['default_binary_paths'] or \
                self.server['default_binary_paths'][self.server['type']] == '':
            self.skipTest(
                "default_binary_paths is not set for the server {0}".format(
                    self.server['name']
                )
            )
        bin_p = self.server['default_binary_paths'][self.server['type']]
        binary_path = os.path.join(bin_p, 'psql')
        if os.name == 'nt':
            binary_path = binary_path + '.exe'
        ret_val = does_utility_exist(binary_path)
        if ret_val is not None:
            self.skipTest(ret_val)

    @patch('pgadmin.tools.import_export.Server')
    @patch('pgadmin.tools.import_export.IEMessage')
    @patch('pgadmin.tools.import_export.filename_with_file_manager_path')
    @patch('pgadmin.tools.import_export.BatchProcess')
    @patch('pgadmin.utils.driver.psycopg2.server_manager.ServerManager.'
           'export_password_env')
    def runTest(self, export_password_env_mock, batch_process_mock,
                filename_mock, ie_message_mock, server_mock):
        # Minimal stand-in for the SQLAlchemy Server model row.
        class TestMockServer():
            def __init__(self, name, host, port, id, username,
                         maintenance_db):
                self.name = name
                self.host = host
                self.port = port
                self.id = id
                self.username = username
                self.maintenance_db = maintenance_db

        self.server_id = parent_node_dict["server"][-1]["server_id"]
        mock_obj = TestMockServer(self.class_params['name'],
                                  self.class_params['host'],
                                  self.class_params['port'],
                                  self.server_id,
                                  self.class_params['username'],
                                  self.class_params['database']
                                  )
        # Make the mocked Server query return our fake server row.
        mock_result = server_mock.query.filter_by.return_value
        mock_result.first.return_value = mock_obj

        filename_mock.return_value = self.params['filename']

        # Prevent any real background process from being spawned.
        batch_process_mock.set_env_variables = MagicMock(
            return_value=True
        )
        batch_process_mock.start = MagicMock(
            return_value=True
        )
        export_password_env_mock.return_value = True

        server_response = server_utils.connect_server(self, self.server_id)
        if server_response["info"] == "Server connected.":
            db_owner = server_response['data']['user']['name']
            self.data = database_utils.get_db_data(db_owner)

        # Honour per-scenario version caps (e.g. OIDS on PG < 12).
        if hasattr(self, 'server_max_version') \
                and server_response["data"]["version"] > self.\
                server_max_version:
            self.skipTest(self.skip_msg)

        url = self.url.format(self.server_id)

        # Create the import/export job
        response = self.tester.post(url,
                                    data=json.dumps(self.params),
                                    content_type='html/json')
        self.assertEqual(response.status_code, 200)
        self.assertTrue(ie_message_mock.called)
        self.assertTrue(batch_process_mock.called)

        # Validate the command-line arguments captured by the mocked
        # BatchProcess constructor.
        if self.expected_cmd_opts:
            for opt in self.expected_cmd_opts:
                arg = repr(batch_process_mock.call_args_list[0][1]['args'])
                self.assertIn(
                    opt,
                    arg
                )
        if self.not_expected_cmd_opts:
            for opt in self.not_expected_cmd_opts:
                arg = repr(batch_process_mock.call_args_list[0][1]['args'])
                self.assertNotIn(
                    opt,
                    arg
                )

View File

@ -0,0 +1,135 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
from pgadmin.tools.import_export import IEMessage
from pgadmin.utils.route import BaseTestGenerator
from unittest.mock import patch
import config
class IEMessageTest(BaseTestGenerator):
    """Test the IEMessage class.

    Exercises the process-description object directly (all external
    lookups mocked) and checks its message, details text, and the
    ``current_storage_dir`` used for client-side download.
    """
    scenarios = [
        ('When Export table with default options',
         dict(
             class_params=dict(
                 sid=1,
                 schema='public',
                 name='test_export',
                 is_import=False,
                 port=5444,
                 host='localhost',
                 database='postgres',
                 server='postgres x',
                 filename='/test_export_file.csv',
                 storage='/',
                 table='test_table',
                 cmd="/test_path",
                 args=[
                     '--command',
                     '\\copy public.test_table (m_id) TO '
                     '\'/test_path/text_export.csv\' CSV '
                     'QUOTE \'"\' ESCAPE \'\'\'\';'
                 ]
             ),
             expected_msg="Copying table data '{0}.{1}' on "
                          "database '{2}' and server ({3}:{4})",
             # File at the storage root -> storage dir is '/'.
             expected_storage_dir='/'
         )),
        ('When Export table with folder path',
         dict(
             class_params=dict(
                 sid=1,
                 schema='public',
                 name='test_export',
                 is_import=False,
                 port=5444,
                 host='localhost',
                 database='postgres',
                 server='postgres x',
                 filename='/test_path/test_export_file.csv',
                 storage='/',
                 table='test_table',
                 cmd="/test_path",
                 args=[
                     '--command',
                     '\\copy public.test_table (m_id) TO '
                     '\'/test_path/text_export.csv\' CSV '
                     'QUOTE \'"\' ESCAPE \'\'\'\';'
                 ]
             ),
             expected_msg="Copying table data '{0}.{1}' on "
                          "database '{2}' and server ({3}:{4})",
             # File inside a subfolder -> that folder is reported.
             expected_storage_dir='/test_path'
         )),
    ]

    @patch('os.path.realpath')
    @patch('pgadmin.misc.bgprocess.processes.get_storage_directory')
    @patch('pgadmin.misc.bgprocess.processes.get_complete_file_path')
    @patch('pgadmin.tools.import_export.IEMessage.get_server_details')
    def runTest(self, get_server_details_mock,
                get_complete_file_path_mock,
                get_storage_directory_mock,
                realpath_mock):
        name = self.class_params['name']
        host = self.class_params['host']
        port = self.class_params['port']
        # All filesystem/server lookups are stubbed so the object can be
        # built without a live server or storage directory.
        get_server_details_mock.return_value = name, host, port
        get_complete_file_path_mock.return_value \
            = self.class_params['filename']
        realpath_mock.return_value = self.class_params['filename']
        get_storage_directory_mock.return_value = '//'

        import_export_obj = IEMessage(
            *self.class_params['args'],
            **{
                'sid': self.class_params['sid'],
                'schema': self.class_params['schema'],
                'table': self.class_params['table'],
                'is_import': self.class_params['is_import'],
                'database': self.class_params['database'],
                'filename': self.class_params['filename'],
                'storage': self.class_params['storage'],
            }
        )

        expected_msg = self.expected_msg.format(
            self.class_params['schema'],
            self.class_params['table'],
            self.class_params['database'],
            self.class_params['host'],
            self.class_params['port']
        )

        # Check the expected message returned
        self.assertEqual(import_export_obj.message, expected_msg)

        # Check the command
        obj_details = import_export_obj.details(self.class_params['cmd'],
                                                self.class_params['args'])
        self.assertIn(self.class_params['schema'], obj_details)
        self.assertIn(self.class_params['table'], obj_details)
        self.assertIn(self.class_params['database'], obj_details)
        self.assertIn(self.class_params['host'], obj_details)
        self.assertIn(str(self.class_params['port']), obj_details)

        # current_storage_dir is only meaningful in server mode.
        if config.SERVER_MODE is False:
            self.skipTest(
                "Skipping tests for Storage manager in Desktop mode."
            )
        else:
            storage_dir = import_export_obj.current_storage_dir
            self.assertEqual(self.expected_storage_dir, storage_dir)

View File

@ -0,0 +1,194 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import time
import random
import simplejson as json
import uuid
import re
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from regression.python_test_utils import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.schemas.\
tables.columns.tests import utils as columns_utils
def create_import_export_job(tester, url, params, assert_equal):
    """Submit an import/export job and return its background-job id.

    :param tester: Flask test client used to issue the request.
    :param url: endpoint to POST the job parameters to.
    :param params: job parameter dict, serialized as JSON.
    :param assert_equal: assertion callable(actual, expected).
    :return: the ``job_id`` reported by the server.
    """
    response = tester.post(
        url, data=json.dumps(params), content_type='html/json')
    assert_equal(response.status_code, 200)

    payload = json.loads(response.data.decode('utf-8'))
    return payload['data']['job_id']
def run_import_export_job(tester, job_id, expected_params, assert_in,
                          assert_not_in, assert_equal):
    """Poll a background import/export job until completion, verify its
    command line and exit code, acknowledge it, and return the file path
    parsed from the job details (upper-cased, possibly quoted — used only
    for best-effort cleanup by the callers)."""
    cnt = 0
    the_process = None
    # Poll the process list (max ~5 tries, 0.5s apart) until the job
    # reports an execution_time, i.e. it has finished.
    while True:
        if cnt >= 5:
            break

        # Check the process list
        response1 = tester.get('/misc/bgprocess/?_={0}'.format(
            random.randint(1, 9999999)))
        assert_equal(response1.status_code, 200)
        process_list = json.loads(response1.data.decode('utf-8'))

        try:
            the_process = next(
                p for p in process_list if p['id'] == job_id)
        except Exception:
            the_process = None

        if the_process and 'execution_time' in the_process:
            break
        time.sleep(0.5)
        cnt += 1

    assert_equal('execution_time' in the_process, True)
    assert_equal('stime' in the_process, True)
    assert_equal('exit_code' in the_process, True)
    assert_equal(the_process['exit_code'] in expected_params[
        'expected_exit_code'
    ], True)

    io_file = None
    if 'details' in the_process:
        # Parse the target/source file token out of the "\copy ... TO/FROM
        # 'file' ..." command shown in the details string.
        # NOTE(review): the parse operates on the upper-cased text and keeps
        # the surrounding quotes, so the returned path may not exist as-is
        # on case-sensitive filesystems — confirm against the callers.
        io_det = the_process['details']
        temp_io_det = io_det.upper()
        if temp_io_det.find(' TO ') > 0:
            io_file = temp_io_det[temp_io_det.find(' TO ') + 3:].split(' ')[1]
        else:
            from_find = temp_io_det.find(' FROM ') + 5
            io_file = temp_io_det[from_find:].split(' ')[1]

    if expected_params['expected_cmd_opts']:
        for opt in expected_params['expected_cmd_opts']:
            assert_in(opt, the_process['details'])
    if expected_params['not_expected_cmd_opts']:
        for opt in expected_params['not_expected_cmd_opts']:
            assert_not_in(opt, the_process['details'])

    # Check the process details
    p_details = tester.get('/misc/bgprocess/{0}?_={1}'.format(
        job_id, random.randint(1, 9999999))
    )
    assert_equal(p_details.status_code, 200)

    p_details = tester.get('/misc/bgprocess/{0}/{1}/{2}/?_={3}'.format(
        job_id, 0, 0, random.randint(1, 9999999))
    )
    assert_equal(p_details.status_code, 200)
    p_details_data = json.loads(p_details.data.decode('utf-8'))

    cnt = 0
    # Retrieve the io job process logs, advancing the out/err read
    # positions until both streams report done (max ~5 tries).
    while True:
        out, err, status = get_params(p_details_data)
        if status or cnt >= 5:
            break

        p_details = tester.get(
            '/misc/bgprocess/{0}/{1}/{2}/?_={3}'.format(
                job_id, out, err, random.randint(1, 9999999))
        )
        assert_equal(p_details.status_code, 200)
        p_details_data = json.loads(p_details.data.decode('utf-8'))

        cnt += 1
        time.sleep(1)

    # Check the job is complete.
    io_ack = tester.put('/misc/bgprocess/{0}'.format(job_id))
    assert_equal(io_ack.status_code, 200)
    io_ack_res = json.loads(io_ack.data.decode('utf-8'))
    assert_equal(io_ack_res['success'], 1)

    return io_file
def get_params(data):
    """Extract stdout/stderr read positions and a combined done flag
    from a background-process details payload.

    :param data: dict possibly containing 'out' and 'err' entries, each
        a dict with a 'pos' key and optionally a 'done' key.
    :return: (out_pos, err_pos, both_done) tuple; positions default to 0
        and the flag to False when the corresponding entry is missing.
    """
    def _channel(key):
        # Mirror the original truthiness semantics: a falsy stream dict
        # propagates as the position value itself.
        pos, done = 0, False
        if key in data:
            stream = data[key]
            pos = stream and stream['pos']
            if 'done' in stream:
                done = stream['done']
        return pos, done

    out_pos, out_done = _channel('out')
    err_pos, err_done = _channel('err')
    return out_pos, err_pos, (out_done and err_done)
def setup_export_data(sobject):
    """Create the database objects an import/export test needs.

    Connects to the last test database, verifies the schema, then
    creates one table with two columns, storing every created name/id
    as attributes on ``sobject`` (the running test case).

    :param sobject: test-case instance to decorate with fixture state.
    :raises Exception: when the database or schema cannot be reached.
    """
    # Create db connection
    sobject.db_name = parent_node_dict["database"][-1]["db_name"]
    schema_info = parent_node_dict["schema"][-1]
    sobject.server_id = schema_info["server_id"]
    sobject.db_id = schema_info["db_id"]
    db_con = database_utils.connect_database(sobject, utils.SERVER_GROUP,
                                             sobject.server_id,
                                             sobject.db_id)
    if not db_con['data']["connected"]:
        raise Exception("Could not connect to database to add a table.")

    # Reuse the schema created by the regression fixtures.
    sobject.schema_id = schema_info["schema_id"]
    sobject.schema_name = schema_info["schema_name"]
    schema_response = schema_utils.verify_schemas(sobject.server,
                                                  sobject.db_name,
                                                  sobject.schema_name)
    if not schema_response:
        raise Exception("Could not find the schema to add a table.")

    # Create table (uuid suffix keeps names unique across runs)
    sobject.table_name = "table_to_export_%s" % (str(uuid.uuid4())[1:8])
    sobject.table_id = tables_utils.create_table(sobject.server,
                                                 sobject.db_name,
                                                 sobject.schema_name,
                                                 sobject.table_name)

    # Create first column
    sobject.column_name = "column_to_export_%s" % (str(uuid.uuid4())[1:8])
    sobject.column_id = columns_utils.create_column(sobject.server,
                                                    sobject.db_name,
                                                    sobject.schema_name,
                                                    sobject.table_name,
                                                    sobject.column_name)

    # Create second column
    sobject.column_name_1 = "column_to_export_%s" % (str(uuid.uuid4())[1:8])
    sobject.column_id_1 = columns_utils.create_column(sobject.server,
                                                      sobject.db_name,
                                                      sobject.schema_name,
                                                      sobject.table_name,
                                                      sobject.column_name_1)
    return None

View File

@ -0,0 +1,75 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""A blueprint module implementing the storage manager functionality"""
import simplejson as json
import os
from flask import url_for, Response, render_template, request, current_app
from flask_babelex import gettext as _
from flask_security import login_required, current_user
from pgadmin.misc.bgprocess.processes import BatchProcess, IProcessDesc
from pgadmin.utils import PgAdminModule, get_storage_directory, html, \
fs_short_path, document_dir, IS_WIN, does_utility_exist
from pgadmin.utils.ajax import make_json_response, bad_request
from config import PG_DEFAULT_DRIVER
from pgadmin.model import Server
from pgadmin.utils.constants import MIMETYPE_APP_JS
# Blueprint/module name under which this tool is registered with pgAdmin.
MODULE_NAME = 'storage_manager'
class StorageManagerModule(PgAdminModule):
    """
    class StorageManagerModule(PgAdminModule)

    A module class for manipulating file operation which is derived from
    PgAdminModule.
    """

    LABEL = _('Storage Manager')

    def get_own_javascripts(self):
        """
        Returns:
            list: js files used by this module
        """
        # Single client-side script, served relative to this module's
        # index endpoint.
        return [
            {
                'name': 'pgadmin.tools.storage_manager',
                'path': url_for('storage_manager.index') + 'js/storage_manager',
                'when': None,
            },
        ]
# Flask blueprint instance exposing the storage manager endpoints.
blueprint = StorageManagerModule(MODULE_NAME, __name__)
@blueprint.route("/")
@login_required
def index():
return bad_request(errormsg=_("This URL cannot be called directly."))
@blueprint.route("/js/storage_manager.js")
@login_required
def script():
"""render the import/export javascript file"""
return Response(
response=render_template("storage_manager/js/storage_manager.js", _=_),
status=200,
mimetype=MIMETYPE_APP_JS
)

View File

@ -0,0 +1,93 @@
/////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2020, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////
import { set_last_traversed_dir, getTransId } from '../../../../misc/file_manager/static/js/helpers';
define([
  'sources/gettext', 'sources/url_for', 'jquery', 'underscore', 'pgadmin.alertifyjs',
  'sources/pgadmin', 'pgadmin.browser', 'sources/csrf', 'pgadmin.file_manager',
], function (
  gettext, url_for, $, _, alertify, pgAdmin, pgBrowser, csrfToken
) {

  pgAdmin = pgAdmin || window.pgAdmin || {};

  // The storage manager only makes sense in server mode, where files
  // live in a shared server-side storage directory.
  var isServerMode = (function() { return pgAdmin.server_mode == 'True'; })();
  var pgTools = pgAdmin.Tools = pgAdmin.Tools || {};

  // In desktop mode do not register the tool at all.
  if(!isServerMode) {
    return;
  }

  // Return back, this has been called more than once
  if (pgAdmin.Tools.storage_manager)
    return pgAdmin.Tools.storage_manager;

  pgTools.storage_manager = {
    init: function () {
      // We do not want to initialize the module multiple times.
      if (this.initialized)
        return;

      this.initialized = true;

      csrfToken.setPGCSRFToken(pgAdmin.csrf_token_header, pgAdmin.csrf_token);

      var storage_manager = this.callback_storage_manager.bind(this);

      // Allow other modules to open the dialog programmatically.
      pgBrowser.Events.on(
        'pgadmin:tools:storage_manager', storage_manager
      );

      // Define the nodes on which the menus to be appear
      var menus = [{
        name: 'storage_manager',
        module: this,
        applies: ['tools'],
        callback: 'callback_storage_manager',
        priority: 2,
        label: gettext('Storage Manager...'),
        enable: true,
      }];

      pgBrowser.add_menus(menus);
    },

    /*
      Open the dialog for the storage functionality.

      When a path is supplied, it is recorded as the last traversed
      directory for the file-manager transaction so the dialog opens
      directly at that location.
    */
    callback_storage_manager: function (path) {
      var params = {
        supported_types: ['sql', 'csv', '*'],
        dialog_type: 'storage_dialog',
        dialog_title: 'Storage Manager...',
        btn_primary: undefined,
      };

      if (!_.isUndefined(path) && !_.isNull(path) && !_.isEmpty(path)) {
        // NOTE(review): getTransId appears to return a synchronous XHR
        // (readyState/responseText are read immediately) — confirm.
        var transId = getTransId(JSON.stringify(params));
        var t_res;
        if (transId.readyState == 4) {
          t_res = JSON.parse(transId.responseText);
        }
        var trans_id = _.isUndefined(t_res) ? 0 : t_res.data.fileTransId;

        set_last_traversed_dir({'path': path}, trans_id);
        pgAdmin.FileManager.init();
        pgAdmin.FileManager.show_dialog(params);
      }
      else {
        pgAdmin.FileManager.init();
        pgAdmin.FileManager.show_dialog(params);
      }
    },
  };

  return pgAdmin.Tools.storage_manager;
});

View File

@ -149,4 +149,22 @@ describe('fileSelectDialog', function () {
expect(Alertify.createModeDlg).toHaveBeenCalled();
});
});
describe('When dialog is called for storage file', () => {
it('Storage file dialog', function() {
params = {
'dialog_title': 'Storage Manager',
'dialog_type': 'storage_dialog',
};
spyOn(Alertify, 'fileStorageDlg').and.callFake(function() {
this.resizeTo = function() {};
return this;
});
pgAdmin.FileManager.show_dialog(params);
expect(Alertify.fileStorageDlg).toHaveBeenCalled();
});
});
});

View File

@ -500,6 +500,7 @@ module.exports = [{
',pgadmin.tools.debugger.direct' +
',pgadmin.node.pga_job' +
',pgadmin.tools.schema_diff' +
',pgadmin.tools.storage_manager' +
',pgadmin.tools.search_objects',
},
}, {

View File

@ -274,6 +274,7 @@ var webpackShimConfig = {
'pgadmin.tools.schema_diff': path.join(__dirname, './pgadmin/tools/schema_diff/static/js/schema_diff'),
'pgadmin.tools.schema_diff_ui': path.join(__dirname, './pgadmin/tools/schema_diff/static/js/schema_diff_ui'),
'pgadmin.tools.search_objects': path.join(__dirname, './pgadmin/tools/search_objects/static/js/search_objects'),
'pgadmin.tools.storage_manager': path.join(__dirname, './pgadmin/tools/storage_manager/static/js/storage_manager'),
'pgadmin.search_objects': path.join(__dirname, './pgadmin/tools/search_objects/static/js'),
'pgadmin.tools.user_management': path.join(__dirname, './pgadmin/tools/user_management/static/js/user_management'),
'pgadmin.user_management.current_user': '/user_management/current_user',