Make the Query Tool history persistent across sessions. Fixes #4017

This commit is contained in:
Dave Page
2019-03-13 13:37:34 +00:00
parent a502019e20
commit ab9a3a57ad
16 changed files with 581 additions and 66 deletions

View File

@@ -18,7 +18,7 @@ import simplejson as json
from flask import Response, url_for, render_template, session, request, \
current_app
from flask_babelex import gettext
from flask_security import login_required
from flask_security import login_required, current_user
from config import PG_DEFAULT_DRIVER, ON_DEMAND_RECORD_COUNT
from pgadmin.misc.file_manager import Filemanager
@@ -42,6 +42,7 @@ from pgadmin.tools.sqleditor.utils.query_tool_preferences import \
from pgadmin.tools.sqleditor.utils.query_tool_fs_utils import \
read_file_generator
from pgadmin.tools.sqleditor.utils.filter_dialog import FilterDialog
from pgadmin.tools.sqleditor.utils.query_history import QueryHistory
MODULE_NAME = 'sqleditor'
@@ -113,7 +114,10 @@ class SqlEditorModule(PgAdminModule):
'sqleditor.query_tool_download',
'sqleditor.connection_status',
'sqleditor.get_filter_data',
'sqleditor.set_filter_data'
'sqleditor.set_filter_data',
'sqleditor.get_query_history',
'sqleditor.add_query_history',
'sqleditor.clear_query_history',
]
def register_preferences(self):
@@ -1504,3 +1508,64 @@ def set_filter_data(trans_id):
request=request,
trans_id=trans_id
)
@blueprint.route(
    '/query_history/<int:trans_id>',
    methods=["POST"], endpoint='add_query_history'
)
@login_required
def add_query_history(trans_id):
    """
    Append one entry to the persistent query history for the current
    user/server/database combination of this query tool session.

    The raw request body is forwarded as-is to QueryHistory.save.

    Args:
        trans_id: unique transaction id of the query tool session
    """
    # Only the connection and transaction objects are needed here; the
    # status/error/session values from the lookup are ignored.
    _, _, conn, trans_obj, _ = check_transaction_status(trans_id)

    return QueryHistory.save(current_user.id, trans_obj.sid, conn.db,
                             request=request)
@blueprint.route(
    '/query_history/<int:trans_id>',
    methods=["DELETE"], endpoint='clear_query_history'
)
@login_required
def clear_query_history(trans_id):
    """
    Clear the stored query history for the current user/server/database
    combination of this query tool session.

    Args:
        trans_id: unique transaction id of the query tool session
    """
    # Only the connection and transaction objects are needed here; the
    # status/error/session values from the lookup are ignored.
    _, _, conn, trans_obj, _ = check_transaction_status(trans_id)

    return QueryHistory.clear(current_user.id, trans_obj.sid, conn.db)
@blueprint.route(
    '/query_history/<int:trans_id>',
    methods=["GET"], endpoint='get_query_history'
)
@login_required
def get_query_history(trans_id):
    """
    Return the stored query history for the current user/server/database
    combination of this query tool session.

    Args:
        trans_id: unique transaction id of the query tool session
    """
    # Only the connection and transaction objects are needed here; the
    # status/error/session values from the lookup are ignored.
    _, _, conn, trans_obj, _ = check_transaction_status(trans_id)

    return QueryHistory.get(current_user.id, trans_obj.sid, conn.db)

View File

@@ -193,11 +193,11 @@ define('tools.querytool', [
});
sql_panel.load(main_docker);
var sql_panel_obj = main_docker.addPanel('sql_panel', wcDocker.DOCK.TOP);
self.sql_panel_obj = main_docker.addPanel('sql_panel', wcDocker.DOCK.TOP);
var text_container = $('<textarea id="sql_query_tool" tabindex: "-1"></textarea>');
var output_container = $('<div id="output-panel" tabindex: "0"></div>').append(text_container);
sql_panel_obj.$container.find('.pg-panel-content').append(output_container);
self.sql_panel_obj.$container.find('.pg-panel-content').append(output_container);
self.query_tool_obj = CodeMirror.fromTextArea(text_container.get(0), {
tabindex: '0',
@@ -222,7 +222,7 @@ define('tools.querytool', [
// Refresh Code mirror on SQL panel resize to
// display its value properly
sql_panel_obj.on(wcDocker.EVENT.RESIZE_ENDED, function() {
self.sql_panel_obj.on(wcDocker.EVENT.RESIZE_ENDED, function() {
setTimeout(function() {
if (self && self.query_tool_obj) {
self.query_tool_obj.refresh();
@@ -312,8 +312,8 @@ define('tools.querytool', [
geometry_viewer.load(main_docker);
// Add all the panels to the docker
self.scratch_panel = main_docker.addPanel('scratch', wcDocker.DOCK.RIGHT, sql_panel_obj);
self.history_panel = main_docker.addPanel('history', wcDocker.DOCK.STACKED, sql_panel_obj);
self.scratch_panel = main_docker.addPanel('scratch', wcDocker.DOCK.RIGHT, self.sql_panel_obj);
self.history_panel = main_docker.addPanel('history', wcDocker.DOCK.STACKED, self.sql_panel_obj);
self.data_output_panel = main_docker.addPanel('data_output', wcDocker.DOCK.BOTTOM);
self.explain_panel = main_docker.addPanel('explain', wcDocker.DOCK.STACKED, self.data_output_panel);
self.messages_panel = main_docker.addPanel('messages', wcDocker.DOCK.STACKED, self.data_output_panel);
@@ -1309,13 +1309,51 @@ define('tools.querytool', [
if(!self.historyComponent) {
self.historyComponent = new QueryHistory($('#history_grid'), self.history_collection);
/* Copy query to query editor, set the focus to editor and move cursor to end */
self.historyComponent.onCopyToEditorClick((query)=>{
self.query_tool_obj.setValue(query);
self.sql_panel_obj.focus();
setTimeout(() => {
self.query_tool_obj.focus();
self.query_tool_obj.setCursor(self.query_tool_obj.lineCount(), 0);
}, 100);
});
self.historyComponent.render();
self.history_panel.off(wcDocker.EVENT.VISIBILITY_CHANGED);
self.history_panel.on(wcDocker.EVENT.VISIBILITY_CHANGED, function() {
if (self.history_panel.isVisible()) {
setTimeout(()=>{
self.historyComponent.focus();
}, 100);
}
});
}
self.history_panel.off(wcDocker.EVENT.VISIBILITY_CHANGED);
self.history_panel.on(wcDocker.EVENT.VISIBILITY_CHANGED, function() {
self.historyComponent.focus();
});
// Make ajax call to get history data except view/edit data
if(self.handler.is_query_tool) {
$.ajax({
url: url_for('sqleditor.get_query_history', {
'trans_id': self.handler.transId,
}),
method: 'GET',
contentType: 'application/json',
})
.done(function(res) {
res.data.result.map((entry) => {
let newEntry = JSON.parse(entry);
newEntry.start_time = new Date(newEntry.start_time);
self.history_collection.add(newEntry);
});
})
.fail(function() {
/* history fetch fail should not affect query tool */
});
} else {
self.historyComponent.setEditorPref({'copy_to_editor':false});
}
},
// Callback function for Add New Row button click.
@@ -1637,11 +1675,26 @@ define('tools.querytool', [
}
alertify.confirm(gettext('Clear history'),
gettext('Are you sure you wish to clear the history?'),
gettext('Are you sure you wish to clear the history?') + '</br>' +
gettext('This will remove all of your query history from this and other sessions for this database.'),
function() {
if (self.history_collection) {
self.history_collection.reset();
}
if(self.handler.is_query_tool) {
$.ajax({
url: url_for('sqleditor.clear_query_history', {
'trans_id': self.handler.transId,
}),
method: 'DELETE',
contentType: 'application/json',
})
.done(function() {})
.fail(function() {
/* history clear fail should not affect query tool */
});
}
setTimeout(() => { self.query_tool_obj.focus(); }, 200);
},
function() {
@@ -2573,14 +2626,34 @@ define('tools.querytool', [
self.query_start_time,
new Date());
}
self.gridView.history_collection.add({
let hist_entry = {
'status': status,
'start_time': self.query_start_time,
'query': self.query,
'row_affected': self.rows_affected,
'total_time': self.total_time,
'message': msg,
});
};
/* Make ajax call to save the history data
* Do not bother query tool if failed to save
* Not applicable for view/edit data
*/
if(self.is_query_tool) {
$.ajax({
url: url_for('sqleditor.add_query_history', {
'trans_id': self.transId,
}),
method: 'POST',
contentType: 'application/json',
data: JSON.stringify(hist_entry),
})
.done(function() {})
.fail(function() {});
}
self.gridView.history_collection.add(hist_entry);
}
},

View File

@@ -143,7 +143,7 @@
height: 0;
position: relative;
.copy-all, .was-copied {
.copy-all, .was-copied, .copy-to-editor {
float: left;
position: relative;
z-index: 10;

View File

@@ -0,0 +1,105 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2019, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
class TestEditorHistory(BaseTestGenerator):
    """Exercises the persistent query history REST endpoints
    (add / get / clear) of the query tool."""

    scenarios = [
        ('When first query is hit',
         dict(
             entry="""{
                 query: 'first sql statement',
                 start_time: '2017-05-03T14:03:15.150Z',
                 status: true,
                 row_affected: 12345,
                 total_time: '14 msec',
                 message: 'something important ERROR:  message
                 from first sql query',
             }""",
             clear=False,
             expected_len=1
         )),
        ('When second query is hit',
         dict(
             entry="""{
                 query: 'second sql statement',
                 start_time: '2016-04-03T14:03:15.99Z',
                 status: true,
                 row_affected: 12345,
                 total_time: '14 msec',
                 message: 'something important ERROR:  message from
                 second sql query',
             }""",
             clear=False,
             expected_len=2
         )),
        ('When cleared',
         dict(
             clear=True,
             expected_len=0
         ))
    ]

    def setUp(self):
        """Connect to the test database and open a query tool session."""
        database_info = parent_node_dict["database"][-1]
        self.server_id = database_info["server_id"]
        self.db_id = database_info["db_id"]

        connection = database_utils.connect_database(self,
                                                     utils.SERVER_GROUP,
                                                     self.server_id,
                                                     self.db_id)
        if connection["info"] != "Database connected.":
            raise Exception("Could not connect to the database.")

        # Initialize query tool; its transaction id scopes the history calls.
        url = '/datagrid/initialize/query_tool/{0}/{1}/{2}'.format(
            utils.SERVER_GROUP, self.server_id, self.db_id)
        response = self.tester.post(url)
        self.assertEquals(response.status_code, 200)

        response_data = json.loads(response.data.decode('utf-8'))
        self.trans_id = response_data['data']['gridTransId']

    def runTest(self):
        """Add or clear a history entry, then verify the stored count."""
        url = '/sqleditor/query_history/{0}'.format(self.trans_id)

        # Mutate the history first: POST a new entry or DELETE everything.
        if self.clear:
            response = self.tester.delete(url)
        else:
            response = self.tester.post(url, data=self.entry)
        self.assertEquals(response.status_code, 200)

        # In both cases, the GET endpoint must report the expected size.
        response = self.tester.get(url)
        self.assertEquals(response.status_code, 200)
        response_data = json.loads(response.data.decode('utf-8'))
        self.assertEquals(len(response_data['data']['result']),
                          self.expected_len)

    def tearDown(self):
        """Disconnect the database opened in setUp."""
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@@ -0,0 +1,137 @@
from pgadmin.utils.ajax import make_json_response
from pgadmin.model import db, QueryHistoryModel
from config import MAX_QUERY_HIST_STORED
class QueryHistory:
    """Data-access helpers for the persistent query tool history.

    Entries are stored per user/server/database in QueryHistoryModel.
    At most MAX_QUERY_HIST_STORED rows are kept per combination; the
    srno column is used as a ring-buffer index so the oldest slot is
    recycled once the limit is reached.
    """

    @staticmethod
    def get(uid, sid, dbname):
        """Return a JSON response with all stored query_info payloads
        for the given user/server/database combination."""
        result = db.session \
            .query(QueryHistoryModel.query_info) \
            .filter(QueryHistoryModel.uid == uid,
                    QueryHistoryModel.sid == sid,
                    QueryHistoryModel.dbname == dbname) \
            .all()

        return make_json_response(
            data={
                'status': True,
                'msg': '',
                # query_info is stored as the raw request body sent by
                # the client (serialized JSON); it is returned untouched.
                'result': [rec.query_info for rec in result]
            }
        )

    @staticmethod
    def update_history_dbname(uid, sid, old_dbname, new_dbname):
        """Re-key stored history rows when a database is renamed."""
        try:
            db.session \
                .query(QueryHistoryModel) \
                .filter(QueryHistoryModel.uid == uid,
                        QueryHistoryModel.sid == sid,
                        QueryHistoryModel.dbname == old_dbname) \
                .update({QueryHistoryModel.dbname: new_dbname})
            db.session.commit()
        except Exception:
            db.session.rollback()
            # do not affect query execution if history clear fails

    @staticmethod
    def save(uid, sid, dbname, request):
        """Persist one history entry taken from the raw request body.

        Uses db.session.merge so that, when the ring buffer wraps, the
        row in the recycled srno slot is overwritten in place. Any
        failure is swallowed (rollback only) so that saving history can
        never break query execution; the response always reports success.
        """
        try:
            # Highest slot number currently used for this combination
            # (None when no history exists yet).
            max_srno = db.session\
                .query(db.func.max(QueryHistoryModel.srno)) \
                .filter(QueryHistoryModel.uid == uid,
                        QueryHistoryModel.sid == sid,
                        QueryHistoryModel.dbname == dbname)\
                .scalar()

            # if no records present
            if max_srno is None:
                new_srno = 1
            else:
                new_srno = max_srno + 1

                # last updated flag is used to recognise the last
                # inserted/updated record.
                # It is helpful to cycle the records
                last_updated_rec = db.session.query(QueryHistoryModel) \
                    .filter(QueryHistoryModel.uid == uid,
                            QueryHistoryModel.sid == sid,
                            QueryHistoryModel.dbname == dbname,
                            QueryHistoryModel.last_updated_flag == 'Y') \
                    .first()

                # there should be a last updated record
                # if not present start from sr no 1
                if last_updated_rec is not None:
                    last_updated_rec.last_updated_flag = 'N'

                    # if max limit reached then recycle
                    if new_srno > MAX_QUERY_HIST_STORED:
                        new_srno = (
                            last_updated_rec.srno % MAX_QUERY_HIST_STORED) + 1
                else:
                    new_srno = 1

                # if the limit is lowered and number of records present is
                # more, then cleanup
                if max_srno > MAX_QUERY_HIST_STORED:
                    db.session.query(QueryHistoryModel)\
                        .filter(QueryHistoryModel.uid == uid,
                                QueryHistoryModel.sid == sid,
                                QueryHistoryModel.dbname == dbname,
                                QueryHistoryModel.srno >
                                MAX_QUERY_HIST_STORED)\
                        .delete()

            # query_info stores the raw request body bytes as-is.
            history_entry = QueryHistoryModel(
                srno=new_srno, uid=uid, sid=sid, dbname=dbname,
                query_info=request.data, last_updated_flag='Y')

            # merge = insert-or-update on the row's primary key, which
            # overwrites a recycled slot instead of inserting a duplicate.
            db.session.merge(history_entry)
            db.session.commit()
        except Exception:
            db.session.rollback()
            # do not affect query execution if history saving fails

        return make_json_response(
            data={
                'status': True,
                'msg': 'Success',
            }
        )

    @staticmethod
    def clear_history(uid, sid, dbname=None):
        """Delete stored history for a user/server pair; when dbname is
        given, only that database's history is removed."""
        try:
            if dbname is not None:
                db.session.query(QueryHistoryModel) \
                    .filter(QueryHistoryModel.uid == uid,
                            QueryHistoryModel.sid == sid,
                            QueryHistoryModel.dbname == dbname) \
                    .delete()
                db.session.commit()
            else:
                db.session.query(QueryHistoryModel) \
                    .filter(QueryHistoryModel.uid == uid,
                            QueryHistoryModel.sid == sid)\
                    .delete()
                db.session.commit()
        except Exception:
            db.session.rollback()
            # do not affect query execution if history clear fails

    @staticmethod
    def clear(uid, sid, dbname=None):
        """Clear history (best effort) and return a success response.

        Always reports success, even if the delete failed, mirroring the
        non-fatal behaviour of save().
        """
        QueryHistory.clear_history(uid, sid, dbname)
        return make_json_response(
            data={
                'status': True,
                'msg': 'Success',
            }
        )