Add support in query history to show internal queries generated by pgAdmin during save data operations. Fixes #4612

Yosry Muhammad
2019-08-16 17:17:12 +05:30
committed by Akshay Joshi
parent 4403f326e9
commit 687204771c
17 changed files with 712 additions and 363 deletions

docs/en_US/images/query_output_history.png (binary image, Executable file → Normal file; 70 KiB before, 127 KiB after; image not shown)

View File

@@ -211,6 +211,10 @@ The Query History tab displays information about recent commands:
* The amount of time it took the server to process the query and return a
result set.
* Messages returned by the server (not noted on the *Messages* tab).
* The source of the query (indicated by icons corresponding to the toolbar).
You can show or hide the queries generated internally by pgAdmin (during
'View/Edit Data' or 'Save Data' operations).

To erase the content of the *Query History* tab, select *Clear history* from
the *Clear* drop-down menu.

View File

@@ -17,6 +17,7 @@ New features
| `Issue #4566 <https://redmine.postgresql.org/issues/4566>`_ - Allow enhanced cookie protection to be disabled for compatibility with dynamically addressed hosting environments.
| `Issue #4570 <https://redmine.postgresql.org/issues/4570>`_ - Add an optimisation to the internal code responsible for searching for treeview nodes.
| `Issue #4574 <https://redmine.postgresql.org/issues/4574>`_ - Display the row count in the popup message when counting table rows, not just in the properties list.
| `Issue #4612 <https://redmine.postgresql.org/issues/4612>`_ - Add support in query history to show internal queries generated by pgAdmin during save data operations.
Housekeeping
************

View File

@@ -1,5 +1,6 @@
class QueryToolLocatorsCss:
btn_save_file = "#btn-save-file"
btn_save_data = "#btn-save-data"
btn_execute_query = "#btn-flash"
btn_query_dropdown = "#btn-query-dropdown"
btn_auto_rollback = "#btn-auto-rollback"
@@ -16,8 +17,16 @@ class QueryToolLocatorsCss:
btn_explain_timing = "#btn-explain-timing"
btn_clear_dropdown = "#btn-clear-dropdown"
btn_clear = "#btn-clear"
btn_commit = "#btn-commit"
query_editor_panel = "#output-panel"
query_history_selected = "#query_list .selected"
query_history_selected_icon = '#query_list .selected #query_source_icon'
query_history_detail = "#query_detail"
query_history_generated_queries_toggle = '#generated-queries-toggle'
editor_panel = "#output-panel"
query_messages_panel = ".sql-editor-message"
execute_icon = "fa-bolt"
explain_icon = "fa-hand-pointer-o"
explain_analyze_icon = "fa-list-alt"
save_data_icon = "icon-save-data-changes"
commit_icon = "icon-commit"

View File

@@ -70,6 +70,14 @@ class QueryToolJourneyTest(BaseFeatureTest):
self._test_history_tab()
print(" OK.", file=sys.stderr)
# Insert data into test editable table
self._insert_data_into_test_editable_table()
print("History query sources and generated queries toggle...",
file=sys.stderr, end="")
self._test_query_sources_and_generated_queries()
print(" OK.", file=sys.stderr)
print("Updatable resultsets...", file=sys.stderr, end="")
self._test_updatable_resultset()
print(" OK.", file=sys.stderr)
@@ -173,24 +181,49 @@ class QueryToolJourneyTest(BaseFeatureTest):
self._assert_clickable(query_we_need_to_scroll_to)
self.page.click_tab("Query Editor")
self.__clear_query_tool()
self.page.click_element(editor_input)
self.page.fill_codemirror_area_with("SELECT * FROM hats")
for _ in range(15):
self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_execute_query).click()
self.page.wait_for_query_tool_loading_indicator_to_disappear()
def _test_query_sources_and_generated_queries(self):
self.__clear_query_history()
self._test_history_query_sources()
self._test_toggle_generated_queries()
self.page.click_tab("History")
query_we_need_to_scroll_to = self.page.find_by_xpath(
"//*[@id='query_list']/div/ul/li[17]"
def _test_history_query_sources(self):
self.page.click_tab("Query Editor")
self._execute_sources_test_queries()
self.page.click_tab("Query History")
history_entries_icons = [
QueryToolLocatorsCss.commit_icon,
QueryToolLocatorsCss.save_data_icon,
QueryToolLocatorsCss.save_data_icon,
QueryToolLocatorsCss.execute_icon,
QueryToolLocatorsCss.explain_analyze_icon,
QueryToolLocatorsCss.explain_icon
]
history_entries_queries = [
"COMMIT;",
"UPDATE public.%s SET normal_column = '10'::numeric "
"WHERE pk_column = '1';" % self.test_editable_table_name,
"BEGIN;",
"SELECT * FROM %s" % self.test_editable_table_name,
"SELECT * FROM %s" % self.test_editable_table_name,
"SELECT * FROM %s" % self.test_editable_table_name
]
self._check_history_queries_and_icons(history_entries_queries,
history_entries_icons)
def _test_toggle_generated_queries(self):
xpath = '//li[contains(@class, "pgadmin-query-history-entry")]'
self.assertTrue(self.page.check_if_element_exist_by_xpath(xpath))
toggle_el = self.page.find_by_xpath(
'//input[@id ="generated-queries-toggle"]/..'
)
for _ in range(17):
ActionChains(self.page.driver) \
.send_keys(Keys.ARROW_DOWN) \
.perform()
self._assert_clickable(query_we_need_to_scroll_to)
toggle_el.click()
self.assertFalse(self.page.check_if_element_exist_by_xpath(xpath))
toggle_el.click()
self.assertTrue(self.page.check_if_element_exist_by_xpath(xpath))
def _test_updatable_resultset(self):
if self.driver_version < 2.8:
@@ -198,13 +231,6 @@ class QueryToolJourneyTest(BaseFeatureTest):
self.page.click_tab("Query Editor")
# Insert data into test table
self.__clear_query_tool()
self._execute_query(
"INSERT INTO %s VALUES (1, 1), (2, 2);"
% self.test_editable_table_name
)
# Select all data (contains the primary key -> should be editable)
self.__clear_query_tool()
query = "SELECT pk_column, normal_column FROM %s" \
@@ -216,6 +242,85 @@ class QueryToolJourneyTest(BaseFeatureTest):
query = "SELECT normal_column FROM %s" % self.test_editable_table_name
self._check_query_results_editable(query, False)
def _execute_sources_test_queries(self):
self.__clear_query_tool()
self._explain_query(
"SELECT * FROM %s;"
% self.test_editable_table_name
)
self._explain_analyze_query(
"SELECT * FROM %s;"
% self.test_editable_table_name
)
self._execute_query(
"SELECT * FROM %s;"
% self.test_editable_table_name
)
# Turn off autocommit
query_options = self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_query_dropdown)
query_options.click()
self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_auto_commit).click()
query_options.click() # Click again to close dropdown
self._update_numeric_cell(2, 10)
self._commit_transaction()
# Turn on autocommit
query_options = self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_query_dropdown)
query_options.click()
self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_auto_commit).click()
query_options.click() # Click again to close dropdown
def _check_history_queries_and_icons(self, history_queries, history_icons):
# Select first query history entry
self.page.find_by_xpath("//*[@id='query_list']/div/ul/li[1]").click()
for icon, query in zip(history_icons, history_queries):
# Check query
query_history_selected_item = self.page.find_by_css_selector(
QueryToolLocatorsCss.query_history_selected
)
self.assertIn(query, query_history_selected_item.text)
# Check source icon
query_history_selected_icon = self.page.find_by_css_selector(
QueryToolLocatorsCss.query_history_selected_icon)
icon_classes = query_history_selected_icon.get_attribute('class')
icon_classes = icon_classes.split(" ")
self.assertTrue(icon in icon_classes)
# Move to next entry
ActionChains(self.page.driver) \
.send_keys(Keys.ARROW_DOWN) \
.perform()
def _update_numeric_cell(self, cell_index, value):
"""
Updates a numeric cell in the first row of the resultset
"""
xpath = '//div[contains(@class, "slick-row") and ' \
'contains(@style, "top:0px")]'
xpath += '/div[contains(@class, "slick-cell") and ' \
'contains(@class, "r' + str(cell_index) + '")]'
cell_el = self.page.find_by_xpath(xpath)
ActionChains(self.driver).double_click(cell_el).perform()
ActionChains(self.driver).send_keys(value). \
send_keys(Keys.ENTER).perform()
self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_save_data).click()
def _insert_data_into_test_editable_table(self):
self.page.click_tab("Query Editor")
self.__clear_query_tool()
self._execute_query(
"INSERT INTO %s VALUES (1, 1), (2, 2);"
% self.test_editable_table_name
)
def __clear_query_tool(self):
self.page.click_element(
self.page.find_by_xpath("//*[@id='btn-clear-dropdown']")
@@ -228,6 +333,19 @@ class QueryToolJourneyTest(BaseFeatureTest):
)
self.page.click_modal('Yes')
def __clear_query_history(self):
self.page.click_element(
self.page.find_by_xpath("//*[@id='btn-clear-dropdown']")
)
ActionChains(self.driver)\
.move_to_element(
self.page.find_by_xpath(
"//*[@id='btn-clear-history']")).perform()
self.page.click_element(
self.page.find_by_xpath("//*[@id='btn-clear-history']")
)
self.page.click_modal('Yes')
def _navigate_to_query_tool(self):
self.page.toggle_open_tree_item(self.server['name'])
self.page.toggle_open_tree_item('Databases')
@@ -240,35 +358,48 @@ class QueryToolJourneyTest(BaseFeatureTest):
self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_execute_query).click()
def _explain_query(self, query):
self.page.fill_codemirror_area_with(query)
self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_explain).click()
def _explain_analyze_query(self, query):
self.page.fill_codemirror_area_with(query)
self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_explain_analyze).click()
def _commit_transaction(self):
self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_commit).click()
def _assert_clickable(self, element):
self.page.click_element(element)
def _check_query_results_editable(self, query, should_be_editable):
self._execute_query(query)
self.page.wait_for_spinner_to_disappear()
# Check if the first cell in the first row is editable
is_editable = self._check_cell_editable(1)
self.assertEqual(is_editable, should_be_editable)
# Check that new rows cannot be added
can_add_rows = self._check_can_add_row()
self.assertEqual(can_add_rows, should_be_editable)
def _check_cell_editable(self, cell_index):
xpath = '//div[contains(@class, "slick-cell") and ' \
'contains(@class, "r' + str(cell_index) + '")]'
"""
Checks if a cell in the first row of the resultset is editable
"""
xpath = '//div[contains(@class, "slick-row") and ' \
'contains(@style, "top:0px")]'
xpath += '/div[contains(@class, "slick-cell") and ' \
'contains(@class, "r' + str(cell_index) + '")]'
cell_el = self.page.find_by_xpath(xpath)
cell_classes = cell_el.get_attribute('class')
cell_classes = cell_classes.split(" ")
self.assertFalse('editable' in cell_classes)
# Get existing value
cell_value = int(cell_el.text)
new_value = cell_value + 1
# Try to update value
ActionChains(self.driver).double_click(cell_el).perform()
cell_classes = cell_el.get_attribute('class')
cell_classes = cell_classes.split(" ")
return 'editable' in cell_classes
def _check_can_add_row(self):
return self.page.check_if_element_exist_by_xpath(
'//div[contains(@class, "new-row")]')
ActionChains(self.driver).send_keys(new_value). \
send_keys(Keys.ENTER).perform()
# Check if the value was updated
return int(cell_el.text) == new_value
def after(self):
self.page.close_query_tool()

View File

@@ -304,7 +304,8 @@ CREATE TABLE public.nonintpkey
)
time.sleep(0.2)
self._update_cell(cell_xpath, data[str(idx)])
self.page.find_by_id("btn-save-data").click() # Save data
self.page.find_by_css_selector(
QueryToolLocatorsCss.btn_save_data).click()
# There should be some delay after save button is clicked, as it
# takes some time to complete save ajax call otherwise discard unsaved
# changes dialog will appear if we try to execute query before previous

View File

@@ -96,10 +96,13 @@ export default class QueryHistoryDetails {
updateQueryMetaData() {
let itemTemplate = (data, description) => {
return `<div class='item'>
<span class='value'>${data}</span>
<span class='description'>${description}</span>
</div>`;
if(data)
return `<div class='item'>
<span class='value'>${data}</span>
<span class='description'>${description}</span>
</div>`;
else
return '';
};
this.$metaData.empty().append(
@@ -134,8 +137,23 @@ export default class QueryHistoryDetails {
}
}
updateInfoMessage() {
if (this.entry.info) {
this.$infoMsgBlock.removeClass('d-none');
this.$infoMsgBlock.empty().append(
`<div class='history-info-text'>
${this.entry.info}
</div>`
);
} else {
this.$infoMsgBlock.addClass('d-none');
this.$infoMsgBlock.empty();
}
}
selectiveRender() {
this.updateErrorMessage();
this.updateInfoMessage();
this.updateCopyButton(false);
this.updateQueryMetaData();
this.query_codemirror.setValue(this.entry.query);
@@ -147,6 +165,7 @@ export default class QueryHistoryDetails {
this.parentNode.empty().append(
`<div id='query_detail' class='query-detail'>
<div class='error-message-block'></div>
<div class='info-message-block'></div>
<div class='metadata-block'></div>
<div class='query-statement-block'>
<div id='history-detail-query'>
@@ -168,6 +187,7 @@ export default class QueryHistoryDetails {
);
this.$errMsgBlock = this.parentNode.find('.error-message-block');
this.$infoMsgBlock = this.parentNode.find('.info-message-block');
this.$copyBtn = this.parentNode.find('#history-detail-query .btn-copy');
this.$copyBtn.off('click').on('click', this.copyAllHandler.bind(this));
this.$copyToEditor = this.parentNode.find('#history-detail-query .btn-copy-editor');

View File

@@ -1,6 +1,7 @@
import moment from 'moment';
import $ from 'jquery';
import _ from 'underscore';
import 'bootstrap.toggle';
const ARROWUP = 38;
const ARROWDOWN = 40;
@@ -65,11 +66,14 @@ export class QueryHistoryItem {
return this.formatDate(this.entry.start_time);
}
render() {
render(is_pgadmin_queries_shown) {
this.$el = $(
`<li class='list-item' tabindex='0' data-key='${this.dataKey()}'>
<div class='entry ${this.entry.status ? '' : 'error'}'>
<div class='query'>${_.escape(this.entry.query)}</div>
<div class='query'>
<i id="query_source_icon" class="query-history-icon sql-icon-lg"></i>
${_.escape(this.entry.query)}
</div>
<div class='other-info'>
<div class='timestamp'>${this.formatDate(this.entry.start_time)}</div>
</div>
@@ -80,6 +84,16 @@ export class QueryHistoryItem {
.on('click', e => {
this.onClickHandler($(e.currentTarget));
});
let query_source = this.entry.query_source;
if(query_source)
this.$el.find('#query_source_icon').addClass(query_source.ICON_CSS_CLASS);
if(this.entry.is_pgadmin_query) {
this.$el.addClass('pgadmin-query-history-entry');
if(!is_pgadmin_queries_shown)
this.$el.addClass('d-none');
}
}
}
@@ -90,6 +104,7 @@ export class QueryHistoryEntries {
this.groupKeyFormat = 'YYYY MM DD';
this.$el = null;
this.is_pgadmin_queries_shown = null;
}
onSelectedChange(onSelectedChangeHandler) {
@@ -98,10 +113,10 @@ export class QueryHistoryEntries {
focus() {
if (!this.$selectedItem) {
this.setSelectedListItem(this.$el.find('.list-item').first());
this.setSelectedListItem(this.$entriesEl.find('.list-item').first());
}
this.$selectedItem.trigger('click');
this.$el[0].focus();
this.$entriesEl.focus();
}
isArrowDown(event) {
@@ -175,7 +190,7 @@ export class QueryHistoryEntries {
addEntry(entry) {
/* Add the entry in respective date group in descending sorted order. */
let groups = this.$el.find('.query-group');
let groups = this.$entriesEl.find('.query-group');
let groupsKeys = $.map(groups, group => {
return $(group).attr('data-key');
});
@@ -189,7 +204,7 @@ export class QueryHistoryEntries {
entry.start_time,
entryGroupKey
).render();
this.$el.prepend($groupEl);
this.$entriesEl.prepend($groupEl);
} else if (groupIdx < 0 && groups.length != 0) {
/* if groups are present, but this is a new group */
$groupEl = new QueryHistoryEntryDateGroup(
@@ -206,7 +221,7 @@ export class QueryHistoryEntries {
i++;
}
if(i == groupsKeys.length) {
this.$el.append($groupEl);
this.$entriesEl.append($groupEl);
}
} else if (groupIdx >= 0) {
/* if the group is present */
@@ -215,18 +230,46 @@ export class QueryHistoryEntries {
let newItem = new QueryHistoryItem(entry);
newItem.onClick(this.setSelectedListItem.bind(this));
newItem.render();
newItem.render(this.is_pgadmin_queries_shown);
$groupEl.find('.query-entries').prepend(newItem.$el);
this.setSelectedListItem(newItem.$el);
}
toggleGeneratedQueries() {
this.$el.find('.pgadmin-query-history-entry').each(function() {
$(this).toggleClass('d-none');
});
this.is_pgadmin_queries_shown = !this.is_pgadmin_queries_shown;
}
render() {
let self = this;
self.$el = $(`
<div id='query_list' class='query-history' tabindex='0'>
<div class="toggle-and-history-container">
<div class="query-history-toggle">
<label class="control-label">
Show queries generated internally by pgAdmin?
</label>
<input id="generated-queries-toggle" type="checkbox"
class="pgadmin-controls" data-style="quick"
data-size="mini" data-on="Yes" data-off="No"
data-onstyle="success" data-offstyle="primary" checked>
</div>
`).on('keydown', this.navigateUpAndDown.bind(this));
<div id='query_list' class='query-history' tabindex='0'></div>
</div>
`);
self.$entriesEl = self.$el.find('#query_list');
self.$entriesEl.on('keydown', this.navigateUpAndDown.bind(this));
self.is_pgadmin_queries_shown = true;
self.$el.find('#generated-queries-toggle').bootstrapToggle().change(
function() {
self.toggleGeneratedQueries();
}
);
self.parentNode.empty().append(self.$el);
}

View File

@@ -0,0 +1,35 @@
/////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2019, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////
/* This file contains the sources of the queries shown in Query History and
their respective icon CSS classes */
export const QuerySources = {
EXECUTE: {
ICON_CSS_CLASS: 'fa fa-bolt',
},
EXPLAIN: {
ICON_CSS_CLASS: 'fa fa-hand-pointer-o',
},
EXPLAIN_ANALYZE: {
ICON_CSS_CLASS: 'fa fa-list-alt',
},
COMMIT: {
ICON_CSS_CLASS: 'icon-commit',
},
ROLLBACK: {
ICON_CSS_CLASS: 'icon-rollback',
},
SAVE_DATA: {
ICON_CSS_CLASS: 'icon-save-data-changes',
},
VIEW_DATA: {
ICON_CSS_CLASS: 'icon-view-data',
},
};

View File

@@ -696,7 +696,7 @@ def generate_client_primary_key_name(columns_info):
@login_required
def save(trans_id):
"""
This method is used to save the changes to the server
This method is used to save the data changes to the server
Args:
trans_id: unique transaction id
@@ -746,7 +746,7 @@ def save(trans_id):
return make_json_response(
data={'status': status, 'result': u"{}".format(msg)}
)
status, res, query_res, _rowid = trans_obj.save(
status, res, query_results, _rowid = trans_obj.save(
changed_data,
session_obj['columns_info'],
session_obj['client_primary_key'],
@@ -754,7 +754,7 @@ def save(trans_id):
else:
status = False
res = error_msg
query_res = None
query_results = None
_rowid = None
transaction_status = conn.transaction_status()
@@ -763,7 +763,7 @@ def save(trans_id):
data={
'status': status,
'result': res,
'query_result': query_res,
'query_results': query_results,
'_rowid': _rowid,
'transaction_status': transaction_status
}
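For context, each element of the renamed query_results list is built by save_changed_data (see the query tool data helper diff further down) and is what the frontend converts into a Query History entry. The sketch below shows the rough shape of the /save response after this change; only the keys are taken from the code, all values and identifiers are illustrative.

# Purely illustrative sketch of the /save response payload after this change;
# the keys mirror the code above and in save_changed_data, the values are made up.
example_save_response = {
    'status': True,
    'result': None,
    'query_results': [
        {
            'status': True,              # per-query success flag
            'result': None,              # server message, if any
            'sql': "UPDATE public.test_table SET normal_column = '10' "
                   "WHERE pk_column = '1';",  # parameter-bound SQL shown in history
            'rows_affected': 1,
            'row_added': None,           # populated for INSERTs with the fetched row
        },
    ],
    '_rowid': None,
    'transaction_status': 0,             # e.g. psycopg2 TRANSACTION_STATUS_IDLE
}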

View File

@@ -291,7 +291,7 @@ input.editor-checkbox:focus {
background-image: url('../img/disconnect.svg');
}
.icon-commit, .icon-rollback, .icon-save-data-changes {
.icon-commit, .icon-rollback, .icon-save-data-changes, .icon-view-data {
display: inline-block;
align-content: center;
vertical-align: middle;
@@ -315,6 +315,10 @@ input.editor-checkbox:focus {
background-image: url('../img/save_data_changes.svg') !important;
}
.icon-view-data {
background-image: url('../img/view_data.svg') !important;
}
.ajs-body .warn-header {
font-size: 13px;
font-weight: bold;

File diff suppressed because one or more lines are too long (new image; 8.3 KiB after)

View File

@@ -27,6 +27,7 @@ define('tools.querytool', [
'sources/sqleditor/geometry_viewer',
'sources/sqleditor/history/history_collection.js',
'sources/sqleditor/history/query_history',
'sources/sqleditor/history/query_sources',
'sources/keyboard_shortcuts',
'sources/sqleditor/query_tool_actions',
'sources/sqleditor/query_tool_notifications',
@@ -49,7 +50,7 @@ define('tools.querytool', [
babelPollyfill, gettext, url_for, $, jqueryui, jqueryui_position, _, S, alertify, pgAdmin, Backbone, codemirror,
pgExplain, GridSelector, ActiveCellCapture, clipboard, copyData, RangeSelectionHelper, handleQueryOutputKeyboardEvent,
XCellSelectionModel, setStagedRows, SqlEditorUtils, ExecuteQuery, httpErrorHandler, FilterHandler,
GeometryViewer, historyColl, queryHist,
GeometryViewer, historyColl, queryHist, querySources,
keyboardShortcuts, queryToolActions, queryToolNotifications, Datagrid,
modifyAnimation, calculateQueryRunTime, callRenderAfterPoll, queryToolPref, csrfToken, panelTitleFunc) {
/* Return back, this has been called more than once */
@@ -63,7 +64,8 @@ define('tools.querytool', [
CodeMirror = codemirror.default,
Slick = window.Slick,
HistoryCollection = historyColl.default,
QueryHistory = queryHist.default;
QueryHistory = queryHist.default,
QuerySources = querySources.QuerySources;
csrfToken.setPGCSRFToken(pgAdmin.csrf_token_header, pgAdmin.csrf_token);
@@ -1361,26 +1363,26 @@ define('tools.querytool', [
});
}
// Make ajax call to get history data except view/edit data
if(self.handler.is_query_tool) {
$.ajax({
url: url_for('sqleditor.get_query_history', {
'trans_id': self.handler.transId,
}),
method: 'GET',
contentType: 'application/json',
})
.done(function(res) {
res.data.result.map((entry) => {
let newEntry = JSON.parse(entry);
newEntry.start_time = new Date(newEntry.start_time);
self.history_collection.add(newEntry);
});
})
.fail(function() {
/* history fetch fail should not affect query tool */
// Make ajax call to get history data
$.ajax({
url: url_for('sqleditor.get_query_history', {
'trans_id': self.handler.transId,
}),
method: 'GET',
contentType: 'application/json',
})
.done(function(res) {
res.data.result.map((entry) => {
let newEntry = JSON.parse(entry);
newEntry.start_time = new Date(newEntry.start_time);
self.history_collection.add(newEntry);
});
} else {
})
.fail(function() {
/* history fetch fail should not affect query tool */
});
if(!self.handler.is_query_tool) {
self.historyComponent.setEditorPref({'copy_to_editor':false});
}
},
@@ -1611,11 +1613,15 @@ define('tools.querytool', [
// Callback function for Save Data Changes button click.
on_save_data: function() {
this.handler.history_query_source = QuerySources.SAVE_DATA;
queryToolActions.saveDataChanges(this.handler);
},
// Callback function for the flash button click.
on_flash: function() {
this.handler.history_query_source = QuerySources.EXECUTE;
queryToolActions.executeQuery(this.handler);
},
@@ -1771,6 +1777,7 @@ define('tools.querytool', [
this._stopEventPropogation(event);
this._closeDropDown(event);
this.handler.history_query_source = QuerySources.EXPLAIN;
queryToolActions.explain(this.handler);
},
@@ -1779,6 +1786,7 @@ define('tools.querytool', [
this._stopEventPropogation(event);
this._closeDropDown(event);
this.handler.history_query_source = QuerySources.EXPLAIN_ANALYZE;
queryToolActions.explainAnalyze(this.handler);
},
@@ -1903,12 +1911,16 @@ define('tools.querytool', [
// Callback function for the commit button click.
on_commit_transaction: function() {
this.handler.close_on_idle_transaction = false;
this.handler.history_query_source = QuerySources.COMMIT;
queryToolActions.executeCommit(this.handler);
},
// Callback function for the rollback button click.
on_rollback_transaction: function() {
this.handler.close_on_idle_transaction = false;
this.handler.history_query_source = QuerySources.ROLLBACK;
queryToolActions.executeRollback(this.handler);
},
});
@@ -2710,36 +2722,48 @@ define('tools.querytool', [
new Date());
}
let hist_entry = {
if(_.isUndefined(self.history_query_source)) {
self.history_query_source = QuerySources.VIEW_DATA;
}
let history_entry = {
'status': status,
'start_time': self.query_start_time,
'query': self.query,
'row_affected': self.rows_affected,
'total_time': self.total_time,
'message': msg,
'query_source': self.history_query_source,
'is_pgadmin_query': !self.is_query_tool,
};
/* Make ajax call to save the history data
* Do not bother query tool if failed to save
* Not applicable for view/edit data
*/
if(self.is_query_tool) {
$.ajax({
url: url_for('sqleditor.add_query_history', {
'trans_id': self.transId,
}),
method: 'POST',
contentType: 'application/json',
data: JSON.stringify(hist_entry),
})
.done(function() {})
.fail(function() {});
if(!self.is_query_tool) {
var info_msg = gettext('This query was generated by pgAdmin as part of a "View/Edit Data" operation');
history_entry.info = info_msg;
}
self.gridView.history_collection.add(hist_entry);
self.add_to_history(history_entry);
}
},
/* Make ajax call to save the history data */
add_to_history: function(history_entry) {
var self = this;
$.ajax({
url: url_for('sqleditor.add_query_history', {
'trans_id': self.transId,
}),
method: 'POST',
contentType: 'application/json',
data: JSON.stringify(history_entry),
})
.done(function() {})
.fail(function() {});
self.gridView.history_collection.add(history_entry);
},
/* This function is used to check whether cell
* is editable or not depending on primary keys
* and staged_rows flag
@@ -2900,7 +2924,7 @@ define('tools.querytool', [
var req_data = self.data_store, view = self.gridView;
req_data.columns = view ? view.handler.columns : self.columns;
var save_successful = false;
var save_successful = false, save_start_time = new Date();
// Make ajax call to save the data
$.ajax({
@@ -2949,7 +2973,7 @@ define('tools.querytool', [
if(is_added) {
// Update the rows in a grid after addition
dataView.beginUpdate();
_.each(res.data.query_result, function(r) {
_.each(res.data.query_results, function(r) {
if (!_.isNull(r.row_added)) {
// Fetch temp_id returned by server after addition
var row_id = Object.keys(r.row_added)[0];
@@ -3043,6 +3067,23 @@ define('tools.querytool', [
grid.gotoCell(_row_index, 1);
}
var query_history_info_msg = gettext('This query was generated by pgAdmin as part of a "Save Data" operation');
_.each(res.data.query_results, function(r) {
var history_entry = {
'status': r.status,
'start_time': save_start_time,
'query': r.sql,
'row_affected': r.rows_affected,
'total_time': null,
'message': r.result,
'query_source': QuerySources.SAVE_DATA,
'is_pgadmin_query': true,
'info': query_history_info_msg,
};
self.add_to_history(history_entry);
});
self.trigger('pgadmin-sqleditor:loading-icon:hide');
grid.invalidate();
@@ -3635,7 +3676,6 @@ define('tools.querytool', [
mode_disabled = true;
}
$('#btn-clear-dropdown').prop('disabled', mode_disabled);
$('#btn-explain').prop('disabled', mode_disabled);
$('#btn-explain-analyze').prop('disabled', mode_disabled);
$('#btn-explain-options-dropdown').prop('disabled', mode_disabled);

View File

@@ -1,6 +1,6 @@
.query-history {
height: 100%;
overflow: auto;
.list-item {
border-bottom: $panel-border;
background-color: $color-bg-theme;
@@ -30,6 +30,11 @@
overflow: hidden;
white-space: nowrap;
user-select: initial;
.query-history-icon {
width: 18px;
text-align: center;
}
}
}
@@ -111,6 +116,17 @@
}
}
.info-message-block {
background: $sql-history-detail-bg;
flex: 0.3;
padding-left: 20px;
.history-info-text {
@extend .text-12;
padding: 7px 0;
}
}
.metadata-block {
flex: 0.4;
padding: 10px 20px;
@@ -219,4 +235,15 @@
cursor: ew-resize;
}
}
.toggle-and-history-container {
display: flex;
flex-direction: column;
height: 100%;
.query-history-toggle {
padding-top: 4px;
padding-bottom: 4px;
}
}
}

View File

@@ -32,9 +32,7 @@ def save_changed_data(changed_data, columns_info, conn, command_obj,
"""
status = False
res = None
query_res = dict()
count = 0
list_of_rowid = []
query_results = []
operations = ('added', 'updated', 'deleted')
list_of_sql = {}
_rowid = None
@@ -44,267 +42,279 @@ def save_changed_data(changed_data, columns_info, conn, command_obj,
for col_name, col_info in columns_info.items()
}
if conn.connected():
is_savepoint = False
# Start the transaction if the session is idle
if conn.transaction_status() == TX_STATUS_IDLE:
conn.execute_void('BEGIN;')
else:
conn.execute_void('SAVEPOINT save_data;')
is_savepoint = True
is_savepoint = False
# Start the transaction if the session is idle
if conn.transaction_status() == TX_STATUS_IDLE:
sql = 'BEGIN;'
else:
sql = 'SAVEPOINT save_data;'
is_savepoint = True
# Iterate total number of records to be updated/inserted
for of_type in changed_data:
# No need to go further if it's not an add/update/delete operation
if of_type not in operations:
continue
# if no data to be saved then continue
if len(changed_data[of_type]) < 1:
continue
status, res = execute_void_wrapper(conn, sql, query_results)
if not status:
return status, res, query_results, None
column_type = {}
column_data = {}
for each_col in columns_info:
if (
columns_info[each_col]['not_null'] and
not columns_info[each_col]['has_default_val']
):
column_data[each_col] = None
column_type[each_col] = \
columns_info[each_col]['type_name']
else:
column_type[each_col] = \
columns_info[each_col]['type_name']
# Iterate total number of records to be updated/inserted
for of_type in changed_data:
# No need to go further if it's not an add/update/delete operation
if of_type not in operations:
continue
# if no data to be saved then continue
if len(changed_data[of_type]) < 1:
continue
# For newly added rows
if of_type == 'added':
# Python dict does not honour the inserted item order,
# so to insert data in order we need to make an ordered
# list of the added indexes. We don't need this mechanism for
# updated/deleted rows as order does not matter for
# those operations
added_index = OrderedDict(
sorted(
changed_data['added_index'].items(),
key=lambda x: int(x[0])
)
column_type = {}
column_data = {}
for each_col in columns_info:
if (
columns_info[each_col]['not_null'] and
not columns_info[each_col]['has_default_val']
):
column_data[each_col] = None
column_type[each_col] = \
columns_info[each_col]['type_name']
else:
column_type[each_col] = \
columns_info[each_col]['type_name']
# For newly added rows
if of_type == 'added':
# Python dict does not honour the inserted item order,
# so to insert data in order we need to make an ordered
# list of the added indexes. We don't need this mechanism for
# updated/deleted rows as order does not matter for
# those operations
added_index = OrderedDict(
sorted(
changed_data['added_index'].items(),
key=lambda x: int(x[0])
)
list_of_sql[of_type] = []
)
list_of_sql[of_type] = []
# When new rows are added, only changed columns data is
# sent from client side. But if column is not_null and has
# no_default_value, set column to blank, instead
# of not null which is set by default.
column_data = {}
pk_names, primary_keys = command_obj.get_primary_keys()
has_oids = 'oid' in column_type
# When new rows are added, only changed columns data is
# sent from client side. But if column is not_null and has
# no_default_value, set column to blank, instead
# of not null which is set by default.
column_data = {}
pk_names, primary_keys = command_obj.get_primary_keys()
has_oids = 'oid' in column_type
for each_row in added_index:
# Get the row index to match with the added rows
# dict key
tmp_row_index = added_index[each_row]
data = changed_data[of_type][tmp_row_index]['data']
# Remove our unique tracking key
data.pop(client_primary_key, None)
data.pop('is_row_copied', None)
list_of_rowid.append(data.get(client_primary_key))
for each_row in added_index:
# Get the row index to match with the added rows
# dict key
tmp_row_index = added_index[each_row]
data = changed_data[of_type][tmp_row_index]['data']
# Remove our unique tracking key
data.pop(client_primary_key, None)
data.pop('is_row_copied', None)
# Update columns value with columns having
# not_null=False and has no default value
column_data.update(data)
sql = render_template(
"/".join([command_obj.sql_path, 'insert.sql']),
data_to_be_saved=column_data,
pgadmin_alias=pgadmin_alias,
primary_keys=None,
object_name=command_obj.object_name,
nsp_name=command_obj.nsp_name,
data_type=column_type,
pk_names=pk_names,
has_oids=has_oids
)
select_sql = render_template(
"/".join([command_obj.sql_path, 'select.sql']),
object_name=command_obj.object_name,
nsp_name=command_obj.nsp_name,
primary_keys=primary_keys,
has_oids=has_oids
)
list_of_sql[of_type].append({
'sql': sql, 'data': data,
'client_row': tmp_row_index,
'select_sql': select_sql
})
# Reset column data
column_data = {}
# For updated rows
elif of_type == 'updated':
list_of_sql[of_type] = []
for each_row in changed_data[of_type]:
data = changed_data[of_type][each_row]['data']
pk_escaped = {
pk: pk_val.replace('%', '%%') if hasattr(
pk_val, 'replace') else pk_val
for pk, pk_val in
changed_data[of_type][each_row]['primary_keys'].items()
}
sql = render_template(
"/".join([command_obj.sql_path, 'update.sql']),
data_to_be_saved=data,
pgadmin_alias=pgadmin_alias,
primary_keys=pk_escaped,
object_name=command_obj.object_name,
nsp_name=command_obj.nsp_name,
data_type=column_type
)
list_of_sql[of_type].append({'sql': sql, 'data': data})
list_of_rowid.append(data.get(client_primary_key))
# For deleted rows
elif of_type == 'deleted':
list_of_sql[of_type] = []
is_first = True
rows_to_delete = []
keys = None
no_of_keys = None
for each_row in changed_data[of_type]:
rows_to_delete.append(changed_data[of_type][each_row])
# Fetch the keys for SQL generation
if is_first:
# We need to convert dict_keys to a normal list in
# Python3.
# In Python2, it's already a list & we will also
# fetch column names using index
keys = list(
changed_data[of_type][each_row].keys()
)
no_of_keys = len(keys)
is_first = False
# Map index with column name for each row
for row in rows_to_delete:
for k, v in row.items():
# Set primary key with label & delete index based
# mapped key
try:
row[changed_data['columns']
[int(k)]['name']] = v
except ValueError:
continue
del row[k]
# Update columns value with columns having
# not_null=False and has no default value
column_data.update(data)
sql = render_template(
"/".join([command_obj.sql_path, 'delete.sql']),
data=rows_to_delete,
primary_key_labels=keys,
no_of_keys=no_of_keys,
"/".join([command_obj.sql_path, 'insert.sql']),
data_to_be_saved=column_data,
pgadmin_alias=pgadmin_alias,
primary_keys=None,
object_name=command_obj.object_name,
nsp_name=command_obj.nsp_name
nsp_name=command_obj.nsp_name,
data_type=column_type,
pk_names=pk_names,
has_oids=has_oids
)
list_of_sql[of_type].append({'sql': sql, 'data': {}})
for opr, sqls in list_of_sql.items():
for item in sqls:
if item['sql']:
item['data'] = {
pgadmin_alias[k] if k in pgadmin_alias else k: v
for k, v in item['data'].items()
}
select_sql = render_template(
"/".join([command_obj.sql_path, 'select.sql']),
object_name=command_obj.object_name,
nsp_name=command_obj.nsp_name,
primary_keys=primary_keys,
has_oids=has_oids
)
row_added = None
list_of_sql[of_type].append({
'sql': sql, 'data': data,
'client_row': tmp_row_index,
'select_sql': select_sql,
'row_id': data.get(client_primary_key)
})
# Reset column data
column_data = {}
def failure_handle(res):
if is_savepoint:
conn.execute_void('ROLLBACK TO SAVEPOINT '
'save_data;')
msg = 'Query ROLLBACK, but the current ' \
'transaction is still ongoing.'
else:
conn.execute_void('ROLLBACK;')
msg = 'Transaction ROLLBACK'
# If we roll backed every thing then update the
# message for each sql query.
for val in query_res:
if query_res[val]['status']:
query_res[val]['result'] = msg
# If list is empty set rowid to 1
try:
if list_of_rowid:
_rowid = list_of_rowid[count]
else:
_rowid = 1
except Exception:
_rowid = 0
return status, res, query_res, _rowid
# For updated rows
elif of_type == 'updated':
list_of_sql[of_type] = []
for each_row in changed_data[of_type]:
data = changed_data[of_type][each_row]['data']
pk_escaped = {
pk: pk_val.replace('%', '%%') if hasattr(
pk_val, 'replace') else pk_val
for pk, pk_val in
changed_data[of_type][each_row]['primary_keys'].items()
}
sql = render_template(
"/".join([command_obj.sql_path, 'update.sql']),
data_to_be_saved=data,
pgadmin_alias=pgadmin_alias,
primary_keys=pk_escaped,
object_name=command_obj.object_name,
nsp_name=command_obj.nsp_name,
data_type=column_type
)
list_of_sql[of_type].append({'sql': sql,
'data': data,
'row_id':
data.get(client_primary_key)})
# For deleted rows
elif of_type == 'deleted':
list_of_sql[of_type] = []
is_first = True
rows_to_delete = []
keys = None
no_of_keys = None
for each_row in changed_data[of_type]:
rows_to_delete.append(changed_data[of_type][each_row])
# Fetch the keys for SQL generation
if is_first:
# We need to convert dict_keys to a normal list in
# Python3.
# In Python2, it's already a list & we will also
# fetch column names using index
keys = list(
changed_data[of_type][each_row].keys()
)
no_of_keys = len(keys)
is_first = False
# Map index with column name for each row
for row in rows_to_delete:
for k, v in row.items():
# Set primary key with label & delete index based
# mapped key
try:
# Fetch oids/primary keys
if 'select_sql' in item and item['select_sql']:
status, res = conn.execute_dict(
item['sql'], item['data'])
else:
status, res = conn.execute_void(
item['sql'], item['data'])
except Exception as _:
failure_handle(res)
raise
row[changed_data['columns']
[int(k)]['name']] = v
except ValueError:
continue
del row[k]
sql = render_template(
"/".join([command_obj.sql_path, 'delete.sql']),
data=rows_to_delete,
primary_key_labels=keys,
no_of_keys=no_of_keys,
object_name=command_obj.object_name,
nsp_name=command_obj.nsp_name
)
list_of_sql[of_type].append({'sql': sql, 'data': {}})
def failure_handle(res, row_id):
mogrified_sql = conn.mogrify(item['sql'], item['data'])
mogrified_sql = mogrified_sql if mogrified_sql is not None \
else item['sql']
query_results.append({
'status': False,
'result': res,
'sql': mogrified_sql,
'rows_affected': 0,
'row_added': None
})
if is_savepoint:
sql = 'ROLLBACK TO SAVEPOINT save_data;'
msg = 'A ROLLBACK was done for the save operation only. ' \
'The active transaction is not affected.'
else:
sql = 'ROLLBACK;'
msg = 'A ROLLBACK was done for the save transaction.'
rollback_status, rollback_result = \
execute_void_wrapper(conn, sql, query_results)
if not rollback_status:
return rollback_status, rollback_result, query_results, None
# If we roll backed every thing then update the
# message for each sql query.
for query in query_results:
if query['status']:
query['result'] = msg
return False, res, query_results, row_id
for opr, sqls in list_of_sql.items():
for item in sqls:
if item['sql']:
item['data'] = {
pgadmin_alias[k] if k in pgadmin_alias else k: v
for k, v in item['data'].items()
}
row_added = None
try:
# Fetch oids/primary keys
if 'select_sql' in item and item['select_sql']:
status, res = conn.execute_dict(
item['sql'], item['data'])
else:
status, res = conn.execute_void(
item['sql'], item['data'])
except Exception as _:
failure_handle(res, item.get('row_id', 0))
raise
if not status:
return failure_handle(res, item.get('row_id', 0))
# Select added row from the table
if 'select_sql' in item:
status, sel_res = conn.execute_dict(
item['select_sql'], res['rows'][0])
if not status:
return failure_handle(res)
return failure_handle(sel_res, item.get('row_id', 0))
# Select added row from the table
if 'select_sql' in item:
status, sel_res = conn.execute_dict(
item['select_sql'], res['rows'][0])
if 'rows' in sel_res and len(sel_res['rows']) > 0:
row_added = {
item['client_row']: sel_res['rows'][0]}
if not status:
if is_savepoint:
conn.execute_void('ROLLBACK TO SAVEPOINT'
' save_data;')
msg = 'Query ROLLBACK, the current' \
' transaction is still ongoing.'
else:
conn.execute_void('ROLLBACK;')
msg = 'Transaction ROLLBACK'
# If we roll backed every thing then update
# the message for each sql query.
for val in query_res:
if query_res[val]['status']:
query_res[val]['result'] = msg
rows_affected = conn.rows_affected()
mogrified_sql = conn.mogrify(item['sql'], item['data'])
mogrified_sql = mogrified_sql if mogrified_sql is not None \
else item['sql']
# store the result of each query in dictionary
query_results.append({
'status': status,
'result': None if row_added else res,
'sql': mogrified_sql,
'rows_affected': rows_affected,
'row_added': row_added
})
# If list is empty set rowid to 1
try:
if list_of_rowid:
_rowid = list_of_rowid[count]
else:
_rowid = 1
except Exception:
_rowid = 0
# Commit the transaction if no error is found & autocommit is activated
if auto_commit:
sql = 'COMMIT;'
status, res = execute_void_wrapper(conn, sql, query_results)
if not status:
return status, res, query_results, None
return status, sel_res, query_res, _rowid
return status, res, query_results, _rowid
if 'rows' in sel_res and len(sel_res['rows']) > 0:
row_added = {
item['client_row']: sel_res['rows'][0]}
rows_affected = conn.rows_affected()
# store the result of each query in dictionary
query_res[count] = {
'status': status,
'result': None if row_added else res,
'sql': item['sql'], 'rows_affected': rows_affected,
'row_added': row_added
}
count += 1
# Commit the transaction if no error is found & autocommit is activated
if auto_commit:
conn.execute_void('COMMIT;')
return status, res, query_res, _rowid
def execute_void_wrapper(conn, sql, query_results):
"""
Executes a sql query with no return and adds it to query_results
:param sql: Sql query
:param query_results: A list of query results in the save operation
:return: status, result
"""
status, res = conn.execute_void(sql)
if status:
query_results.append({
'status': status,
'result': res,
'sql': sql, 'rows_affected': 0,
'row_added': None
})
return status, res
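A minimal usage sketch (the helper name below is hypothetical and not part of the commit): save_changed_data calls execute_void_wrapper for BEGIN/SAVEPOINT/COMMIT so that these internal statements are recorded in query_results and therefore surface in Query History.

# Hypothetical helper mirroring how save_changed_data opens the transaction;
# tx_status_idle defaults to psycopg2's TRANSACTION_STATUS_IDLE (0).
def record_transaction_start(conn, query_results, tx_status_idle=0):
    if conn.transaction_status() == tx_status_idle:
        sql = 'BEGIN;'
    else:
        sql = 'SAVEPOINT save_data;'
    # On success this appends an entry such as
    # {'status': True, 'result': None, 'sql': 'BEGIN;',
    #  'rows_affected': 0, 'row_added': None}
    return execute_void_wrapper(conn, sql, query_results)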

View File

@@ -116,8 +116,9 @@ class TestSaveChangedData(BaseTestGenerator):
]
},
save_status=False,
check_sql=None,
check_result=None
check_sql="SELECT * FROM %s "
"WHERE pk_col = 1 AND normal_col = 'four'",
check_result='SELECT 0'
)),
('When updating a row in a valid way', dict(
save_payload={
@@ -171,9 +172,9 @@ class TestSaveChangedData(BaseTestGenerator):
"updated": {
"1":
{"err": False,
"data": {"pk_col": "2"},
"data": {"pk_col": "1"},
"primary_keys":
{"pk_col": 1}
{"pk_col": 2}
}
},
"added": {},
@@ -210,8 +211,9 @@ class TestSaveChangedData(BaseTestGenerator):
]
},
save_status=False,
check_sql=None,
check_result=None
check_sql="SELECT * FROM %s "
"WHERE pk_col = 1 AND normal_col = 'two'",
check_result='SELECT 0'
)),
('When deleting a row', dict(
save_payload={
@@ -283,20 +285,19 @@ class TestSaveChangedData(BaseTestGenerator):
save_status = response_data['data']['status']
self.assertEquals(save_status, self.save_status)
if self.check_sql:
# Execute check sql
# Add test table name to the query
check_sql = self.check_sql % self.test_table_name
is_success, response_data = \
execute_query(tester=self.tester,
query=check_sql,
start_query_tool_url=self.start_query_tool_url,
poll_url=self.poll_url)
self.assertEquals(is_success, True)
# Execute check sql
# Add test table name to the query
check_sql = self.check_sql % self.test_table_name
is_success, response_data = \
execute_query(tester=self.tester,
query=check_sql,
start_query_tool_url=self.start_query_tool_url,
poll_url=self.poll_url)
self.assertEquals(is_success, True)
# Check table for updates
result = response_data['data']['result']
self.assertEquals(result, self.check_result)
# Check table for updates
result = response_data['data']['result']
self.assertEquals(result, self.check_result)
def tearDown(self):
# Disconnect the database

View File

@@ -1917,3 +1917,17 @@ Failed to reset the connection to the server due to following error:
)
return enc_password
def mogrify(self, query, parameters):
"""
This function will return the sql query after parameters binding
:param query: sql query before parameters (variables) binding
:param parameters: query parameters / variables
:return:
"""
status, cursor = self.__cursor()
if not status:
return None
else:
mogrified_sql = cursor.mogrify(query, parameters)
return mogrified_sql
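A brief sketch of how this is consumed (the wrapper name is hypothetical): save_changed_data uses mogrify() to store the parameter-bound SQL in query_results for the history entry, falling back to the raw SQL when mogrify() returns None.

# Hypothetical wrapper illustrating the fallback used in save_changed_data.
def bound_history_sql(conn, sql, parameters):
    """Return the SQL recorded in Query History for a saved change."""
    mogrified_sql = conn.mogrify(sql, parameters)
    return mogrified_sql if mogrified_sql is not None else sql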