Fix saving of query output as CSV data. Fixes #1405
commit 83a1535f89 (parent a6024fc49a)
@@ -182,6 +182,7 @@
             </div>
         </div>
         <div id="editor-panel"></div>
+        <iframe id="download-csv" style="display:none"></iframe>
     </div>
 </div>
 {% endblock %}

@@ -1215,3 +1215,58 @@ def save_file():
             'status': True,
         }
     )
+
+
+@blueprint.route('/query_tool/download/<int:trans_id>', methods=["GET"])
+@login_required
+def start_query_download_tool(trans_id):
+    sync_conn = None
+    status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
+
+    if status and conn is not None \
+            and trans_obj is not None and session_obj is not None:
+
+        data = request.args if request.args else None
+        try:
+            if data and 'query' in data:
+                sql = data['query']
+                conn_id = str(random.randint(1, 9999999))
+                sync_conn = conn.manager.connection(
+                    did=trans_obj.did,
+                    conn_id=conn_id,
+                    auto_reconnect=False,
+                    async=False
+                )
+
+                sync_conn.connect(autocommit=False)
+
+                # This returns generator of records.
+                status, gen = sync_conn.execute_on_server_as_csv(sql, records=2000)
+
+                if not status:
+                    conn.manager.release(conn_id=conn_id, did=trans_obj.did)
+                    return internal_server_error(errormsg=str(gen))
+
+                def cleanup():
+                    conn.manager.connections[sync_conn.conn_id]._release()
+                    del conn.manager.connections[sync_conn.conn_id]
+
+                r = Response(gen(), mimetype='text/csv')
+
+                if 'filename' in data and data['filename'] != "":
+                    filename = data['filename']
+                else:
+                    import time
+                    filename = str(int(time.time())) + ".csv"
+
+                r.headers["Content-Disposition"] = "attachment;filename={0}".format(filename)
+
+                r.call_on_close(cleanup)
+
+                return r
+
+        except Exception as e:
+            conn.manager.release(conn_id=conn_id, did=trans_obj.did)
+            return internal_server_error(errormsg=str(e))
+    else:
+        return internal_server_error(errormsg=gettext("Transaction status check failed."))

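The new start_query_download_tool route streams whatever execute_on_server_as_csv yields straight into a Flask Response, marks the response as a file attachment through the Content-Disposition header, and uses call_on_close to release the extra synchronous connection once the download ends. A minimal sketch of that streaming pattern, assuming a hypothetical route and a stand-in row generator (only the Response / header / call_on_close usage mirrors the commit):

    # Minimal sketch: stream a generator as a CSV attachment in Flask.
    # The route name and fake_rows() are illustrative stand-ins, not pgAdmin code.
    import time

    from flask import Flask, Response

    app = Flask(__name__)


    def fake_rows():
        # Stand-in for the server-side cursor generator returned by
        # execute_on_server_as_csv in the commit.
        yield "id,name\r\n"
        for i in range(3):
            yield "{0},row-{0}\r\n".format(i)


    @app.route('/download-example')
    def download_example():
        def cleanup():
            # Runs after the response is closed, mirroring how the commit
            # releases the synchronous connection when the download finishes.
            app.logger.info("download finished, cleaning up")

        r = Response(fake_rows(), mimetype='text/csv')
        filename = str(int(time.time())) + ".csv"
        r.headers["Content-Disposition"] = "attachment;filename={0}".format(filename)
        r.call_on_close(cleanup)
        return r
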
@@ -2602,76 +2602,63 @@ define(

       // This function will download the grid data as CSV file.
       _download: function() {
-        var self = this;
-        var coll = self.collection.fullCollection === undefined ? self.collection : self.collection.fullCollection;
+        var self = this,
+          selected_code = self.gridView.query_tool_obj.getSelection(),
+          sql = "";

-        if (self.columns != undefined &&
-            coll != undefined &&
-            coll.length > 0)
-        {
-          var csv_col = _.indexBy(self.columns, 'name'),
-            labels = _.pluck(self.columns, 'label'),
-            keys = _.pluck(self.columns, 'name');
+        if (selected_code.length > 0)
+          sql = selected_code;
+        else
+          sql = self.gridView.query_tool_obj.getValue();

-          // Fetch the items from fullCollection and convert it as csv format
-          var csv = keys.join(',') + '\n';
-          csv += coll.map(function(item) {
-            return _.map(keys, function(key) {
-              var cell = csv_col [key].cell,
-                // suppose you want to preserve custom formatters
-                formatter = cell.prototype && cell.prototype.formatter;
+        // If it is an empty query, do nothing.
+        if (sql.length <= 0) return;

-              return formatter && formatter.fromRaw ?
-                formatter.fromRaw(item.get(key), item) : item.get(key);
-            }).join(',');
-          }).join('\n');
-
-          // Download the file.
-          var encodedUri = encodeURI('data:text/csv&charset=utf-8&filename=download.csv&value=' + csv),
-            link = document.createElement('a');
-          link.setAttribute('href', encodedUri);
-
-          /* If download is from view data then file name should be
-           * the object name for which data is to be displayed.
-           */
-          if (!self.is_query_tool) {
-            $.ajax({
-              url: "{{ url_for('sqleditor.index') }}" + "object/get/" + self.transId,
-              method: 'GET',
-              success: function(res) {
-                if (res.data.status) {
-                  filename = res.data.result + '.csv';
-                  link.setAttribute('download', filename);
-                  link.click();
-                }
-              },
-              error: function(e) {
-                if (e.readyState == 0) {
-                  alertify.alert('Get Object Name Error',
-                    '{{ _('Not connected to the server or the connection to the server has been closed.') }}'
-                  );
-                  return;
-                }
-
-                var msg = e.responseText;
-                if (e.responseJSON != undefined &&
-                  e.responseJSON.errormsg != undefined)
-                  msg = e.responseJSON.errormsg;
-
-                alertify.alert('Get Object Name Error', msg);
-              }
-            });
-          }
-          else {
-            var cur_time = new Date();
-            var filename = 'data-' + cur_time.getTime() + '.csv';
-            link.setAttribute('download', filename);
-            link.click();
-          }
-        }
-        else {
-          alertify.alert('Download Data', 'No data is available to download');
-        }
+        /* If download is from view data then file name should be
+         * the object name for which data is to be displayed.
+         */
+        if (!self.is_query_tool) {
+          $.ajax({
+            url: "{{ url_for('sqleditor.index') }}" + "object/get/" + self.transId,
+            method: 'GET',
+            success: function(res) {
+              if (res.data.status) {
+                filename = res.data.result + '.csv';
+                self._trigger_csv_download(sql, filename);
+              }
+            },
+            error: function(e) {
+              if (e.readyState == 0) {
+                alertify.alert('Get Object Name Error',
+                  '{{ _('Not connected to the server or the connection to the server has been closed.') }}'
+                );
+                return;
+              }
+
+              var msg = e.responseText;
+              if (e.responseJSON != undefined &&
+                e.responseJSON.errormsg != undefined)
+                msg = e.responseJSON.errormsg;
+
+              alertify.alert('Get Object Name Error', msg);
+            }
+          });
+        }
+        else {
+          var cur_time = new Date();
+          var filename = 'data-' + cur_time.getTime() + '.csv';
+          self._trigger_csv_download(sql, filename);
+        }

       },
+      // Trigger query result download to csv.
+      _trigger_csv_download: function(query, filename) {
+        var self = this,
+          link = $(this.container).find("#download-csv"),
+          url = "{{ url_for('sqleditor.index') }}" + "query_tool/download/" + self.transId;
+
+        url +="?" + $.param({query:query, filename:filename});
+        link.attr("src", url);
+      },

       _auto_rollback: function() {

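On the client, _trigger_csv_download packs query and filename into the hidden iframe's URL with $.param, and the server reads them back through request.args in start_query_download_tool. A rough standalone Python illustration of that query-string round trip (the query text and filename below are made up):

    # Sketch: the query string $.param({query: ..., filename: ...}) produces,
    # and how the Flask side recovers the values (request.args does the same).
    try:
        from urllib.parse import urlencode, parse_qs  # Python 3
    except ImportError:                               # Python 2 fallback
        from urllib import urlencode
        from urlparse import parse_qs

    params = {'query': 'SELECT * FROM pg_class', 'filename': 'classes.csv'}
    query_string = urlencode(params)
    # e.g. 'query=SELECT+%2A+FROM+pg_class&filename=classes.csv'

    decoded = parse_qs(query_string)
    assert decoded['query'][0] == params['query']
    assert decoded['filename'][0] == params['filename']
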
@@ -17,6 +17,8 @@ import datetime
 import os
+import random
 import select
 import sys
+import csv

 import psycopg2
 import psycopg2.extras

@@ -32,6 +34,11 @@ from .keywords import ScanKeyword
 from ..abstract import BaseDriver, BaseConnection
 from .cursor import DictCursor

+if sys.version_info < (3,):
+    from StringIO import StringIO
+else:
+    from io import StringIO
+
 _ = gettext

 ASYNC_WAIT_TIMEOUT = 0.01 # in seconds or 10 milliseconds

@@ -284,7 +291,10 @@ Failed to connect to the database server(#{server_id}) for connection ({conn_id}
         # autocommit flag does not work with asynchronous connections.
         # By default asynchronous connection runs in autocommit mode.
         if self.async == 0:
-            self.conn.autocommit = True
+            if 'autocommit' in kwargs and kwargs['autocommit'] == False:
+                self.conn.autocommit = False
+            else:
+                self.conn.autocommit = True
         register_date_typecasters(self.conn)

         status, res = self.execute_scalar("""

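This kwargs check lets the download path call sync_conn.connect(autocommit=False), so the synchronous connection used for the export does not run in autocommit mode and the server-side cursor created later stays inside a transaction. A small sketch of the psycopg2 autocommit behaviour this relies on, assuming a placeholder DSN:

    # Sketch: psycopg2 autocommit toggling (DSN is a placeholder).
    import psycopg2

    conn = psycopg2.connect("dbname=postgres user=postgres")  # hypothetical DSN

    # A plain psycopg2 connection starts with autocommit disabled; the driver
    # normally enables it for synchronous connections, and the new
    # kwargs['autocommit'] == False branch keeps it disabled for CSV downloads.
    conn.autocommit = False

    cur = conn.cursor()
    cur.execute("SELECT 1")
    print(cur.fetchone())

    conn.rollback()  # nothing is committed unless commit() is called
    conn.close()
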
@@ -384,11 +394,12 @@ WHERE

         return True, None

-    def __cursor(self):
+    def __cursor(self, server_cursor=False):
         cur = getattr(g, str(self.manager.sid) + '#' + self.conn_id, None)

         if self.connected() and cur and not cur.closed:
-            return True, cur
+            if not server_cursor or (server_cursor and cur.name):
+                return True, cur

         if not self.connected():
             status = False

@@ -419,7 +430,13 @@ Attempt to reconnect failed with the error:
             return False, msg

         try:
-            cur = self.conn.cursor(cursor_factory=DictCursor)
+            if server_cursor:
+                # Providing name to cursor will create server side cursor.
+                cursor_name = "CURSOR:{0}".format(self.conn_id)
+                cur = self.conn.cursor(name=cursor_name,
+                                       cursor_factory=DictCursor)
+            else:
+                cur = self.conn.cursor(cursor_factory=DictCursor)
         except psycopg2.Error as pe:
             errmsg = gettext("""
 Failed to create cursor for psycopg2 connection with error message for the \

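Passing name= to conn.cursor() is what turns it into a server-side (named) cursor in psycopg2: the result set stays on the server and rows are pulled over in batches instead of being loaded into memory at once. A short sketch of that behaviour outside the driver, with placeholder connection details:

    # Sketch: batched fetching through a psycopg2 named (server-side) cursor.
    import psycopg2

    conn = psycopg2.connect("dbname=postgres user=postgres")  # hypothetical DSN
    cur = conn.cursor(name="CURSOR:example")  # a name makes it server-side

    cur.execute("SELECT generate_series(1, 10000)")

    while True:
        rows = cur.fetchmany(2000)  # 2000 rows per round trip, as in the commit
        if not rows:
            break
        # process the batch here instead of holding the whole result set

    cur.close()
    conn.close()
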
@@ -471,6 +488,76 @@ Attempt to reconnect it failed with the error:
         if self.async == 1:
             self._wait(cur.connection)

+    def execute_on_server_as_csv(self, query, params=None, formatted_exception_msg=False, records=2000):
+        status, cur = self.__cursor(server_cursor=True)
+        self.row_count = 0
+
+        if not status:
+            return False, str(cur)
+        query_id = random.randint(1, 9999999)
+
+        current_app.logger.log(25,
+            "Execute (with server cursor) for server #{server_id} - {conn_id} (Query-id: {query_id}):\n{query}".format(
+                server_id=self.manager.sid,
+                conn_id=self.conn_id,
+                query=query,
+                query_id=query_id
+            )
+        )
+        try:
+            self.__internal_blocking_execute(cur, query, params)
+        except psycopg2.Error as pe:
+            cur.close()
+            errmsg = self._formatted_exception_msg(pe, formatted_exception_msg)
+            current_app.logger.error(
+                "Failed to execute query ((with server cursor) for the server #{server_id} - {conn_id} (Query-id: {query_id}):\nError Message:{errmsg}".format(
+                    server_id=self.manager.sid,
+                    conn_id=self.conn_id,
+                    query=query,
+                    errmsg=errmsg,
+                    query_id=query_id
+                )
+            )
+            return False, errmsg
+
+        def gen():
+
+            results = cur.fetchmany(records)
+            if not results:
+                if not cur.closed:
+                    cur.close()
+                return
+
+            header = [c.to_dict()['name'] for c in cur.ordered_description()]
+
+            res_io = StringIO()
+
+            csv_writer = csv.DictWriter(
+                res_io, fieldnames=header, delimiter=str(','), quoting=csv.QUOTE_NONNUMERIC
+            )
+            csv_writer.writeheader()
+            csv_writer.writerows(results)
+
+            yield res_io.getvalue().strip(str('\r\n'))
+
+            while True:
+                results = cur.fetchmany(records)
+
+                if not results:
+                    if not cur.closed:
+                        cur.close()
+                    break
+                res_io = StringIO()
+
+                csv_writer = csv.DictWriter(
+                    res_io, fieldnames=header, delimiter=str(','), quoting=csv.QUOTE_NONNUMERIC
+                )
+                csv_writer.writerows(results)
+                yield res_io.getvalue().strip(str('\r\n'))
+
+        return True, gen
+
     def execute_scalar(self, query, params=None, formatted_exception_msg=False):
         status, cur = self.__cursor()
         self.row_count = 0

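Inside gen(), every fetchmany(records) batch is rendered to CSV text by a csv.DictWriter writing into a StringIO buffer, and only the first batch carries the header row. A condensed standalone sketch of that conversion step, with hard-coded batches standing in for cursor results:

    # Sketch: turning row batches into CSV chunks the way gen() does.
    import csv
    from io import StringIO

    header = ['id', 'name']
    batches = [  # stand-ins for successive cur.fetchmany(records) results
        [{'id': 1, 'name': 'alpha'}, {'id': 2, 'name': 'beta'}],
        [{'id': 3, 'name': 'gamma'}],
    ]

    for i, batch in enumerate(batches):
        buf = StringIO()
        writer = csv.DictWriter(
            buf, fieldnames=header, delimiter=',', quoting=csv.QUOTE_NONNUMERIC
        )
        if i == 0:
            writer.writeheader()  # header only once, as for the first fetch
        writer.writerows(batch)
        print(buf.getvalue().strip('\r\n'))
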
@@ -1151,7 +1238,8 @@ class ServerManager(object):
             raise Exception("Information is not available.")

     def connection(
-            self, database=None, conn_id=None, auto_reconnect=True, did=None
+            self, database=None, conn_id=None, auto_reconnect=True, did=None,
+            async=None
     ):
         msg_active_conn = gettext(
             "Server has no active connection. Please connect to the server."

@@ -1197,7 +1285,10 @@ WHERE db.oid = {0}""".format(did))
         if my_id in self.connections:
             return self.connections[my_id]
         else:
-            async = 1 if conn_id is not None else 0
+            if async is None:
+                async = 1 if conn_id is not None else 0
+            else:
+                async = 1 if async is True else 0
             self.connections[my_id] = Connection(
                 self, my_id, database, auto_reconnect, async
             )