Query tool/data editor initial version

This commit is contained in:
Akshay Joshi 2016-04-14 15:04:03 +01:00 committed by Dave Page
parent 0420210076
commit c6acbcb5ad
16 changed files with 4816 additions and 0 deletions

View File

@ -528,6 +528,10 @@ fieldset[disabled] .form-control {
background-color: #AAA;
}
.backgrid th.renderable, .backgrid td.renderable {
white-space: pre-wrap;
}
.subnode-header {
background-color: #2C76B4;
height: 35px;
@ -952,3 +956,8 @@ ul.nav.nav-tabs {
.pgadmin-controls.SQL>.CodeMirror {
height: 500px!important;
}
.wcPanelTab > div .wcTabIcon.fa {
padding-left: 0px !important;
color: black;
}

View File

@ -0,0 +1,266 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""A blueprint module implementing the datagrid frame."""
MODULE_NAME = 'datagrid'
import pickle
import random
import json
from flask import Response, url_for, render_template, session, request, make_response
from flask.ext.babel import gettext
from flask.ext.security import login_required
from pgadmin.tools.sqleditor.command import *
from pgadmin.utils import PgAdminModule
from pgadmin.utils.ajax import make_json_response, bad_request, internal_server_error
class DataGridModule(PgAdminModule):
"""
class DataGridModule(PgAdminModule)
A module class for Edit Grid derived from PgAdminModule.
"""
LABEL = "Data Grid"
def get_own_menuitems(self):
return {}
def get_own_javascripts(self):
return [{
'name': 'pgadmin.datagrid',
'path': url_for('datagrid.index') + "datagrid",
'when': None
}]
def get_panels(self):
return []
blueprint = DataGridModule(MODULE_NAME, __name__, static_url_path='/static')
@blueprint.route("/")
@login_required
def index():
return bad_request(errormsg=gettext('User can not call this URL directly'))
@blueprint.route("/css/datagrid.css")
def datagrid_css():
return make_response(
render_template('datagrid/css/datagrid.css'),
200, {'Content-Type': 'text/css'}
)
@blueprint.route("/filter")
@login_required
def show_filter():
return render_template(MODULE_NAME + '/filter.html')
@blueprint.route(
'/initialize/datagrid/<int:cmd_type>/<obj_type>/<int:sid>/<int:did>/<int:obj_id>',
methods=["PUT", "POST"]
)
@login_required
def initialize_datagrid(cmd_type, obj_type, sid, did, obj_id):
"""
This method is responsible for creating an asynchronous connection.
After creating the connection it will instantiate and initialize
the object as per the object type. It will also create a unique
transaction id and store the information into session variable.
Args:
cmd_type: Contains the value of the menu item that was clicked.
obj_type: Contains the type of the selected object for which the data grid is to be rendered
sid: Server Id
did: Database Id
obj_id: Id of currently selected object
"""
if request.data:
filter_sql = json.loads(request.data.decode())
else:
filter_sql = request.args or request.form
# Create asynchronous connection using random connection id.
conn_id = str(random.randint(1, 9999999))
try:
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
conn = manager.connection(did=did, conn_id=conn_id)
except Exception as e:
return internal_server_error(errormsg=str(e))
# Connect the Server
status, msg = conn.connect()
if not status:
return internal_server_error(errormsg=str(msg))
try:
# Get the object as per the object type
command_obj = ObjectRegistry.get_object(obj_type, conn_id=conn_id, sid=sid,
did=did, obj_id=obj_id, cmd_type=cmd_type,
sql_filter=filter_sql)
except Exception as e:
return internal_server_error(errormsg=str(e))
# Create a unique id for the transaction
trans_id = str(random.randint(1, 9999999))
if 'gridData' not in session:
sql_grid_data = dict()
else:
sql_grid_data = session['gridData']
# Use pickle to store the command object which will be used
# later by the sql grid module.
sql_grid_data[trans_id] = {
'command_obj': pickle.dumps(command_obj, -1) # -1 specifies the highest protocol version available
}
# Store the grid dictionary into the session variable
session['gridData'] = sql_grid_data
return make_json_response(data={'gridTransId': trans_id})
@blueprint.route('/panel/<int:trans_id>/<is_query_tool>/<editor_title>', methods=["GET"])
def panel(trans_id, is_query_tool, editor_title):
"""
This method calls index.html to render the data grid.
Args:
trans_id: unique transaction id
is_query_tool: True if the panel is opened from the Query Tool menu.
editor_title: Title of the editor
"""
return render_template("datagrid/index.html", _=gettext, uniqueId=trans_id,
is_query_tool=is_query_tool, editor_title=editor_title)
@blueprint.route(
'/initialize/query_tool/<int:sid>/<int:did>',
methods=["PUT", "POST"]
)
@login_required
def initialize_query_tool(sid, did):
"""
This method is responsible for instantiating and initializing
the query tool object. It will also create a unique
transaction id and store the information into session variable.
Args:
sid: Server Id
did: Database Id
"""
try:
command_obj = ObjectRegistry.get_object('query_tool', sid=sid, did=did)
except Exception as e:
return internal_server_error(errormsg=str(e))
# Create a unique id for the transaction
trans_id = str(random.randint(1, 9999999))
if 'gridData' not in session:
sql_grid_data = dict()
else:
sql_grid_data = session['gridData']
# Use pickle to store the command object which will be used
# later by the sql grid module.
sql_grid_data[trans_id] = {
'command_obj': pickle.dumps(command_obj, -1) # -1 specifies the highest protocol version available
}
# Store the grid dictionary into the session variable
session['gridData'] = sql_grid_data
return make_json_response(data={'gridTransId': trans_id})
@blueprint.route('/close/<int:trans_id>', methods=["GET"])
def close(trans_id):
"""
This method is used to close the asynchronous connection
and remove the information of unique transaction id from
the session variable.
Args:
trans_id: unique transaction id
"""
grid_data = session['gridData']
# Return from the function if transaction id not found
if str(trans_id) not in grid_data:
return make_json_response(data={'status': True})
cmd_obj_str = grid_data[str(trans_id)]['command_obj']
# Use pickle.loads function to get the command object
cmd_obj = pickle.loads(cmd_obj_str)
# if connection id is None then no need to release the connection
if cmd_obj.conn_id is not None:
try:
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(cmd_obj.sid)
conn = manager.connection(did=cmd_obj.did, conn_id=cmd_obj.conn_id)
except Exception as e:
return internal_server_error(errormsg=str(e))
# Release the connection
if conn.connected():
manager.release(did=cmd_obj.did, conn_id=cmd_obj.conn_id)
# Remove the information of unique transaction id from the session variable.
grid_data.pop(str(trans_id), None)
session['gridData'] = grid_data
return make_json_response(data={'status': True})
@blueprint.route('/filter/validate/<int:sid>/<int:did>/<int:obj_id>',
methods=["PUT", "POST"])
@login_required
def validate_filter(sid, did, obj_id):
"""
This method is used to validate the sql filter.
Args:
sid: Server Id
did: Database Id
obj_id: Id of currently selected object
"""
if request.data:
filter_sql = json.loads(request.data.decode())
else:
filter_sql = request.args or request.form
try:
# Create object of SQLFilter class
sql_filter_obj = SQLFilter(sid=sid, did=did, obj_id=obj_id)
# Call validate_filter method to validate the SQL.
status, res = sql_filter_obj.validate_filter(filter_sql)
except Exception as e:
return internal_server_error(errormsg=str(e))
return make_json_response(data={'status': status, 'result': res})
@blueprint.route("/datagrid.js")
@login_required
def script():
"""render the required javascript"""
return Response(response=render_template("datagrid/js/datagrid.js", _=gettext),
status=200,
mimetype="application/javascript")
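
Both initialize routes above follow the same pattern: pickle the command object, key it under a random transaction id, and store it in session['gridData'] so that later requests (panel, close, and the sqleditor routes) can rebuild it. The following is a minimal, standalone sketch of that round-trip; store_command() and load_command() are hypothetical helpers used purely for illustration and are not part of this commit.

import pickle
import random

session = {}  # stand-in for the Flask session object

def store_command(command_obj):
    """Pickle a command object under a new random transaction id."""
    trans_id = str(random.randint(1, 9999999))
    grid_data = session.get('gridData', {})
    # -1 selects the highest pickle protocol available
    grid_data[trans_id] = {'command_obj': pickle.dumps(command_obj, -1)}
    session['gridData'] = grid_data
    return trans_id

def load_command(trans_id):
    """Rebuild the command object for a transaction id, or return None."""
    grid_data = session.get('gridData', {})
    if trans_id not in grid_data:
        return None
    return pickle.loads(grid_data[trans_id]['command_obj'])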

View File

@ -0,0 +1,9 @@
<div class="filter-textarea">
<textarea id="sql_filter" rows="5"></textarea>
<style>
.filter-textarea .CodeMirror-scroll {
min-height: 120px;
max-height: 120px;
}
</style>
</div>

View File

@ -0,0 +1,53 @@
{% extends "base.html" %}
{% block title %}{{ config.APP_NAME }} - Datagrid{% endblock %}
{% block css_link %}
<link type="text/css" rel="stylesheet" href="{{ url_for('sqleditor.static', filename='css/sqleditor.css') }}">
{% endblock %}
{% block body %}
<style>
body {
padding: 0px;
}
</style>
<div id="main-editor_panel">
<div id="fetching_data" class="sql-editor-busy-fetching hide">
<span class="sql-editor-busy-icon"><img src="{{ url_for('browser.static', filename='css/aciTree/image/load-root.gif') }}"></span>
<span class="sql-editor-busy-text"></span>
</div>
<div class="sql-editor" data-trans-id="{{ uniqueId }}"></div>
</div>
{% endblock %}
{% block init_script %}
try {
require(
['jquery', 'pgadmin', 'pgadmin.sqleditor'],
function($, pgAdmin) {
var editorPanel = $('.sql-editor'),
loadingDiv = $('#fetching_data'),
msgDiv = loadingDiv.find('.sql-editor-busy-text');
// Get the controller object from pgAdmin.SqlEditor
var sqlEditorController = pgAdmin.SqlEditor.create(editorPanel);
// Listen on events to show/hide loading-icon and change messages.
sqlEditorController.on('pgadmin-sqleditor:loading-icon:message', function(msg) {
msgDiv.text(msg);
}).on('pgadmin-sqleditor:loading-icon:show', function(msg) {
loadingDiv.removeClass('hide');
msgDiv.text(msg);
}).on('pgadmin-sqleditor:loading-icon:hide', function() {
if (!loadingDiv.hasClass('hide')) {
loadingDiv.addClass('hide');
}
});
// Start the query tool.
sqlEditorController.start({{ is_query_tool }}, "{{ editor_title }}");
});
} catch (err) {
/* Show proper error dialog */
console.log(err);
}
{% endblock %}

View File

@ -0,0 +1,428 @@
define(
['jquery','alertify', 'pgadmin', 'pgadmin.browser', 'wcdocker'],
function($, alertify, pgAdmin) {
// Some scripts do export their object in the window only.
// Generally the one, which do no have AMD support.
var wcDocker = window.wcDocker,
pgBrowser = pgAdmin.Browser;
/* Return back, this has been called more than once */
if (pgAdmin.DataGrid)
return pgAdmin.DataGrid;
pgAdmin.DataGrid = {
init: function() {
if (this.initialized)
return;
this.initialized = true;
this.title_index = 1;
// Define list of nodes on which view data option appears
var supported_nodes = [
'table', 'view',
'foreign-table', 'catalog_object'
],
/* Enable/disable View data menu in tools based
* on node selected. if selected node is present
* in supported_nodes, menu will be enabled
* otherwise disabled.
*/
view_menu_enabled = function(obj) {
if(!_.isUndefined(obj) && !_.isNull(obj))
return (_.indexOf(supported_nodes, obj._type) !== -1 ? true: false);
else
return false;
};
// Define list of nodes on which Query tool option doesn't appears
var unsupported_nodes = [
'server-group', 'server', 'coll-tablespace', 'tablespace',
'coll-role', 'role', 'coll-resource_group', 'resource_group',
'coll-database'
],
/* Enable/disable Query tool menu in tools based
* on node selected. if selected node is present
* in unsupported_nodes, menu will be disabled
* otherwise enabled.
*/
query_tool_menu_enabled = function(obj) {
if(!_.isUndefined(obj) && !_.isNull(obj))
return (_.indexOf(unsupported_nodes, obj._type) !== -1 ? false: true);
else
return false;
};
// Define the nodes on which the menus to be appear
var menus = [{
name: 'query_tool', module: this, applies: ['tools'],
callback: 'show_query_tool', enable: query_tool_menu_enabled,
priority: 1, label: '{{ _('Query tool') }}',
icon: 'fa fa-bolt'
}];
// Create context menu
for (var idx = 0; idx < supported_nodes.length; idx++) {
menus.push({
name: 'view_first_100_rows_context_' + supported_nodes[idx],
node: supported_nodes[idx], module: this, data: {mnuid: 1},
applies: ['context', 'object'], callback: 'show_data_grid', enable: view_menu_enabled,
category: 'view_data', priority: 101, label: '{{ _('View First 100 Rows') }}'
},{
name: 'view_last_100_rows_context_' + supported_nodes[idx],
node: supported_nodes[idx], module: this, data: {mnuid: 2},
applies: ['context', 'object'], callback: 'show_data_grid', enable: view_menu_enabled,
category: 'view_data', priority: 102, label: '{{ _('View Last 100 Rows') }}'
},{
name: 'view_all_rows_context_' + supported_nodes[idx],
node: supported_nodes[idx], module: this, data: {mnuid: 3},
applies: ['context', 'object'], callback: 'show_data_grid', enable: view_menu_enabled,
category: 'view_data', priority: 103, label: '{{ _('View All Rows') }}'
},{
name: 'view_filtered_rows_context_' + supported_nodes[idx],
node: supported_nodes[idx], module: this, data: {mnuid: 4},
applies: ['context', 'object'], callback: 'show_filtered_row', enable: view_menu_enabled,
category: 'view_data', priority: 104, label: '{{ _('View Filtered Rows...') }}'
});
}
pgAdmin.Browser.add_menu_category('view_data', '{{ _('View Data') }}', 100, 'fa fa-th');
pgAdmin.Browser.add_menus(menus);
// Creating a new pgAdmin.Browser frame to show the data.
var dataGridFrameType = new pgAdmin.Browser.Frame({
name: 'frm_datagrid',
title: 'Edit Data',
showTitle: true,
isCloseable: true,
isPrivate: true,
url: 'about:blank'
});
// Load the newly created frame
dataGridFrameType.load(pgBrowser.docker);
},
// This is a callback function to show data when the user clicks a menu item.
show_data_grid: function(data, i) {
var self = this,
d = pgAdmin.Browser.tree.itemData(i);
if (d === undefined) {
alertify.alert(
'Data Grid Error',
'No object selected.'
);
return;
}
// Get the parent data from the tree node hierarchy.
var node = pgBrowser.Nodes[d._type],
parentData = node.getTreeNodeHierarchy(i);
// If server or database is undefined then return from the function.
if (parentData.server === undefined || parentData.database === undefined) {
return;
}
// If schema, view and catalog are all undefined then return from the function.
if (parentData.schema === undefined && parentData.view === undefined &&
parentData.catalog === undefined) {
return;
}
var nsp_name = '';
if (parentData.schema != undefined) {
nsp_name = parentData.schema.label;
}
else if (parentData.view != undefined) {
nsp_name = parentData.view.label;
}
else if (parentData.catalog != undefined) {
nsp_name = parentData.catalog.label;
}
var baseUrl = "{{ url_for('datagrid.index') }}" + "initialize/datagrid/" + data.mnuid + "/" + d._type + "/" +
parentData.server._id + "/" + parentData.database._id + "/" + d._id;
var grid_title = parentData.server.label + '-' + parentData.database.label + '-'
+ nsp_name + '.' + d.label;
// Initialize the data grid.
self.initialize_data_grid(baseUrl, grid_title, '');
},
// This is a callback function to show filtered data when the user clicks a menu item.
show_filtered_row: function(data, i) {
var self = this,
d = pgAdmin.Browser.tree.itemData(i);
if (d === undefined) {
alertify.alert(
'Data Grid Error',
'No object selected.'
);
return;
}
// Get the parent data from the tree node hierarchy.
var node = pgBrowser.Nodes[d._type],
parentData = node.getTreeNodeHierarchy(i);
// If server or database is undefined then return from the function.
if (parentData.server === undefined || parentData.database === undefined) {
return;
}
// If schema, view and catalog are all undefined then return from the function.
if (parentData.schema === undefined && parentData.view === undefined &&
parentData.catalog === undefined) {
return;
}
var nsp_name = '';
if (parentData.schema != undefined) {
nsp_name = parentData.schema.label;
}
else if (parentData.view != undefined) {
nsp_name = parentData.view.label;
}
else if (parentData.catalog != undefined) {
nsp_name = parentData.catalog.label;
}
// Create base url to initialize the edit grid
var baseUrl = "{{ url_for('datagrid.index') }}" + "initialize/datagrid/" + data.mnuid + "/" + d._type + "/" +
parentData.server._id + "/" + parentData.database._id + "/" + d._id;
// Create url to validate the SQL filter
var validateUrl = "{{ url_for('datagrid.index') }}" + "filter/validate/" +
parentData.server._id + "/" + parentData.database._id + "/" + d._id;
var grid_title = parentData.server.label + '-' + parentData.database.label + '-'
+ nsp_name + '.' + d.label;
// Create filter dialog using alertify
if (!alertify.filterDialog) {
alertify.dialog('filterDialog', function factory() {
return {
main: function(title, message) {
this.set('title', title);
this.message = message;
},
setup:function() {
return {
buttons:[
{ text: "OK", className: "btn btn-primary" },
{ text: "Cancel", className: "btn btn-danger" }
],
options: { modal: 0, resizable: false, maximizable: false, pinnable: false}
};
},
build:function() {},
prepare:function() {
var $content = $(this.message),
$sql_filter = $content.find('#sql_filter');
this.setContent($content.get(0));
// Apply CodeMirror to filter text area.
this.filter_obj = CodeMirror.fromTextArea($sql_filter.get(0), {
lineNumbers: true,
lineWrapping: true,
matchBrackets: true,
indentUnit: 4,
mode: "text/x-sql"
});
},
callback: function(closeEvent) {
if (closeEvent.button.text == "{{ _('OK') }}") {
var sql = this.filter_obj.getValue();
// Make ajax call to include the filter by selection
$.ajax({
url: validateUrl,
method: 'POST',
async: false,
contentType: "application/json",
data: JSON.stringify(sql),
success: function(res) {
if (res.data.status) {
// Initialize the data grid.
self.initialize_data_grid(baseUrl, grid_title, sql);
}
else {
alertify.alert(
'Validation Error',
res.data.result
);
}
},
error: function(e) {
alertify.alert(
'Validation Error',
e
);
}
});
}
}
};
});
}
var content = '';
$.get("{{ url_for('datagrid.index') }}" + "filter",
function(data) {
alertify.filterDialog('Data Filter', data).resizeTo(600, 400);
}
);
},
initialize_data_grid: function(baseUrl, grid_title, sql_filter) {
var self = this;
/* Ajax call to initialize the edit grid, which creates
* an asynchronous connection and create appropriate query
* for the selected node.
*/
$.ajax({
url: baseUrl,
method: 'POST',
dataType: 'json',
contentType: "application/json",
data: JSON.stringify(sql_filter),
success: function(res) {
/* On successful initialization find the dashboard panel,
* create a new panel and add it to the dashboard panel.
*/
var panel_title = ' Query-' + self.title_index;
self.title_index += 1;
var dashboardPanel = pgBrowser.docker.findPanels('dashboard');
dataGridPanel = pgBrowser.docker.addPanel('frm_datagrid', wcDocker.DOCK.STACKED, dashboardPanel[0]);
dataGridPanel.title(panel_title);
dataGridPanel.icon('fa fa-bolt');
dataGridPanel.focus();
// Listen on the panel closed event.
dataGridPanel.on(wcDocker.EVENT.CLOSED, function() {
$.ajax({
url: "{{ url_for('datagrid.index') }}" + "close/" + res.data.gridTransId,
method: 'GET'
});
});
// Open the panel if frame is initialized
baseUrl = "{{ url_for('datagrid.index') }}" + "panel/" + res.data.gridTransId + "/false/" + grid_title;
var openDataGridURL = function(j) {
setTimeout(function() {
var frameInitialized = j.data('frameInitialized');
if (frameInitialized) {
var frame = j.data('embeddedFrame');
if (frame) {
frame.openURL(baseUrl);
}
} else {
openDataGridURL(j);
}
}, 100);
};
openDataGridURL($(dataGridPanel));
},
error: function(e) {
alertify.alert(
'SQL Tool Initialize Error',
e.responseJSON.errormsg
);
}
});
},
// This is a callback function to show the query tool when the user clicks a menu item.
show_query_tool: function(data, i) {
var self = this,
d = pgAdmin.Browser.tree.itemData(i);
if (d === undefined) {
alertify.alert(
'Query tool Error',
'No object selected.'
);
return;
}
// Get the parent data from the tree node hierarchy.
var node = pgBrowser.Nodes[d._type],
parentData = node.getTreeNodeHierarchy(i);
// If server, database is undefined then return from the function.
if (parentData.server === undefined || parentData.database === undefined) {
return;
}
var baseUrl = "{{ url_for('datagrid.index') }}" + "initialize/query_tool/" + parentData.server._id +
"/" + parentData.database._id;
var grid_title = parentData.database.label + ' on ' + parentData.server.user.name + '@' +
parentData.server.label ;
var panel_title = ' Query-' + self.title_index;
self.title_index += 1;
$.ajax({
url: baseUrl,
method: 'POST',
dataType: 'json',
contentType: "application/json",
success: function(res) {
/* On successful initialization find the dashboard panel,
* create a new panel and add it to the dashboard panel.
*/
var dashboardPanel = pgBrowser.docker.findPanels('dashboard');
queryToolPanel = pgBrowser.docker.addPanel('frm_datagrid', wcDocker.DOCK.STACKED, dashboardPanel[0]);
queryToolPanel.title(panel_title);
queryToolPanel.icon('fa fa-bolt');
queryToolPanel.focus();
// Listen on the panel closed event.
queryToolPanel.on(wcDocker.EVENT.CLOSED, function() {
$.ajax({
url: "{{ url_for('datagrid.index') }}" + "close/" + res.data.gridTransId,
method: 'GET'
});
});
// Open the panel if frame is initialized
baseUrl = "{{ url_for('datagrid.index') }}" + "panel/" + res.data.gridTransId + "/true/" + grid_title;
var openQueryToolURL = function(j) {
setTimeout(function() {
var frameInitialized = j.data('frameInitialized');
if (frameInitialized) {
var frame = j.data('embeddedFrame');
if (frame) {
frame.openURL(baseUrl);
}
} else {
openQueryToolURL(j);
}
}, 100);
};
openQueryToolURL($(queryToolPanel));
},
error: function(e) {
alertify.alert(
'Query Tool Initialize Error',
e.responseJSON.errormsg
);
}
});
}
};
return pgAdmin.DataGrid;
});
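
The client-side flow above maps directly onto the Flask routes defined in the datagrid module: POST to an initialize endpoint, read the returned gridTransId, point the embedded frame at the panel URL, and GET the close endpoint when the panel is destroyed. A rough sketch of that handshake, assuming an already authenticated HTTP session and a hypothetical deployment URL:

import requests

base = 'http://localhost:5050/datagrid'  # hypothetical deployment path
s = requests.Session()                   # assumed to already carry a valid login

# 1. Initialize the query tool for server id 1 and database id 1 (example ids).
res = s.post(base + '/initialize/query_tool/1/1').json()
trans_id = res['data']['gridTransId']

# 2. The URL the embedded frame is pointed at (is_query_tool is 'true' here).
panel_url = '{0}/panel/{1}/true/{2}'.format(base, trans_id, 'editor title')

# 3. When the panel closes, release the connection and the session entry.
s.get('{0}/close/{1}'.format(base, trans_id))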

View File

@ -0,0 +1,951 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""A blueprint module implementing the sqleditor frame."""
MODULE_NAME = 'sqleditor'
import json
import pickle
import random
from flask import Response, url_for, render_template, session, request
from flask.ext.babel import gettext
from flask.ext.security import login_required
from pgadmin.utils import PgAdminModule
from pgadmin.utils.ajax import make_json_response, bad_request, success_return, internal_server_error
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.tools.sqleditor.command import QueryToolCommand
# Async Constants
ASYNC_OK = 1
ASYNC_READ_TIMEOUT = 2
ASYNC_WRITE_TIMEOUT = 3
ASYNC_NOT_CONNECTED = 4
ASYNC_EXECUTION_ABORTED = 5
# Transaction status constants
TX_STATUS_IDLE = 0
TX_STATUS_ACTIVE = 1
TX_STATUS_INTRANS = 2
TX_STATUS_INERROR = 3
class SqlEditorModule(PgAdminModule):
"""
class SqlEditorModule(PgAdminModule)
A module class for SQL Grid derived from PgAdminModule.
"""
LABEL = "SQL Editor"
def get_own_menuitems(self):
return {}
def get_own_javascripts(self):
return [{
'name': 'pgadmin.sqleditor',
'path': url_for('sqleditor.index') + "sqleditor",
'when': None
}]
def get_panels(self):
return []
def register_preferences(self):
self.items_per_page = self.preference.register(
'display', 'items_per_page',
gettext("Items per page in grid"), 'integer', 50,
category_label=gettext('Display')
)
blueprint = SqlEditorModule(MODULE_NAME, __name__, static_url_path='/static')
@blueprint.route('/')
@login_required
def index():
return bad_request(errormsg=gettext('User can not call this URL directly'))
def update_session_grid_transaction(trans_id, data):
grid_data = session['gridData']
grid_data[str(trans_id)] = data
session['gridData'] = grid_data
def check_transaction_status(trans_id):
"""
This function is used to check whether the transaction id
is available in the session object, and to check the
connection status.
Args:
trans_id:
Returns: status and connection object
"""
grid_data = session['gridData']
# Return from the function if transaction id not found
if str(trans_id) not in grid_data:
return False, gettext('Transaction ID not found in the session.'), None, None, None
# Fetch the object for the specified transaction id.
# Use pickle.loads function to get the command object
session_obj = grid_data[str(trans_id)]
trans_obj = pickle.loads(session_obj['command_obj'])
try:
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid)
conn = manager.connection(did=trans_obj.did, conn_id=trans_obj.conn_id)
except Exception as e:
return False, internal_server_error(errormsg=str(e)), None, None, None
if conn.connected():
return True, None, conn, trans_obj, session_obj
else:
return False, gettext('Not connected to server or connection with the server has been closed.'), \
None, trans_obj, session_obj
@blueprint.route('/view_data/start/<int:trans_id>', methods=["GET"])
@login_required
def start_view_data(trans_id):
"""
This method is used to execute a query using an asynchronous connection.
Args:
trans_id: unique transaction id
"""
limit = -1
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
try:
# Fetch the sql and primary_keys from the object
sql = trans_obj.get_sql()
pk_names, primary_keys = trans_obj.get_primary_keys()
# Fetch the applied filter.
filter_applied = trans_obj.is_filter_applied()
# Fetch the limit for the SQL query
limit = trans_obj.get_limit()
can_edit = trans_obj.can_edit()
can_filter = trans_obj.can_filter()
# Store the primary keys to the session object
session_obj['primary_keys'] = primary_keys
update_session_grid_transaction(trans_id, session_obj)
# Execute sql asynchronously
status, result = conn.execute_async(sql)
except Exception as e:
return internal_server_error(errormsg=str(e))
else:
status = False
result = error_msg
filter_applied = False
can_edit = False
can_filter = False
sql = None
return make_json_response(
data={
'status': status, 'result': result,
'filter_applied': filter_applied,
'limit': limit, 'can_edit': can_edit,
'can_filter': can_filter, 'sql': sql,
'items_per_page': blueprint.items_per_page.get()
}
)
@blueprint.route('/query_tool/start/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def start_query_tool(trans_id):
"""
This method is used to execute a query using an asynchronous connection.
Args:
trans_id: unique transaction id
"""
if request.data:
sql = json.loads(request.data.decode())
else:
sql = request.args or request.form
grid_data = session['gridData']
# Return from the function if transaction id not found
if str(trans_id) not in grid_data:
return make_json_response(
data={
'status': False, 'result': gettext('Transaction ID not found in the session.'),
'can_edit': False, 'can_filter': False
}
)
# Fetch the object for the specified transaction id.
# Use pickle.loads function to get the command object
session_obj = grid_data[str(trans_id)]
trans_obj = pickle.loads(session_obj['command_obj'])
can_edit = False
can_filter = False
if trans_obj is not None and session_obj is not None:
conn_id = trans_obj.conn_id
# if conn_id is None then we will have to create a new connection
if conn_id is None:
# Create asynchronous connection using random connection id.
conn_id = str(random.randint(1, 9999999))
try:
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid)
conn = manager.connection(did=trans_obj.did, conn_id=conn_id)
except Exception as e:
return internal_server_error(errormsg=str(e))
# Connect to the Server if not connected.
if not conn.connected():
status, msg = conn.connect()
if not status:
return internal_server_error(errormsg=str(msg))
if conn.connected():
# on successful connection set the connection id to the
# transaction object
trans_obj.set_connection_id(conn_id)
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
update_session_grid_transaction(trans_id, session_obj)
# If auto commit is False and transaction status is Idle
# then call the is_begin_required() function to check whether BEGIN
# is required or not.
if not trans_obj.auto_commit \
and conn.transaction_status() == TX_STATUS_IDLE \
and is_begin_required(sql):
conn.execute_void("BEGIN;")
# Execute the sql asynchronously with params as None
# and formatted_error as True.
status, result = conn.execute_async(sql)
# If the transaction aborted for some reason and
# Auto RollBack is True then issue a rollback to cleanup.
trans_status = conn.transaction_status()
if trans_status == TX_STATUS_INERROR and trans_obj.auto_rollback:
conn.execute_void("ROLLBACK;")
else:
status = False
result = gettext('Not connected to server or connection with the server has been closed.')
can_edit = trans_obj.can_edit()
can_filter = trans_obj.can_filter()
else:
status = False
result = gettext('Either Transaction object or Session object not found.')
return make_json_response(
data={
'status': status, 'result': result,
'can_edit': can_edit, 'can_filter': can_filter,
'items_per_page': blueprint.items_per_page.get()
}
)
@blueprint.route('/poll/<int:trans_id>', methods=["GET"])
@login_required
def poll(trans_id):
"""
This method polls the result of the asynchronous query and returns the result.
Args:
trans_id: unique transaction id
"""
col_info = None
primary_keys = None
rows_affected = 0
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None and session_obj is not None:
status, result, col_info = conn.poll()
if status == ASYNC_OK:
status = 'Success'
if 'primary_keys' in session_obj:
primary_keys = session_obj['primary_keys']
# if transaction object is instance of QueryToolCommand
# and transaction aborted for some reason then issue a
# rollback to cleanup
if isinstance(trans_obj, QueryToolCommand):
trans_status = conn.transaction_status()
if trans_status == TX_STATUS_INERROR and trans_obj.auto_rollback:
conn.execute_void("ROLLBACK;")
elif status == ASYNC_EXECUTION_ABORTED:
status = 'Cancel'
else:
status = 'Busy'
else:
status = 'NotConnected'
result = error_msg
# Check column info is available or not
if col_info is not None and len(col_info) > 0:
columns = dict()
rows_affected = conn.rows_affected()
for col in col_info:
col_type = dict()
col_type['type_code'] = col[1]
col_type['type_name'] = None
columns[col[0]] = col_type
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['columns_info'] = columns
update_session_grid_transaction(trans_id, session_obj)
else:
if result is None:
result = conn.status_message()
rows_affected = conn.rows_affected()
return make_json_response(data={'status': status, 'result': result,
'colinfo': col_info, 'primary_keys': primary_keys,
'rows_affected': rows_affected})
@blueprint.route('/fetch/types/<int:trans_id>', methods=["GET"])
@login_required
def fetch_pg_types(trans_id):
"""
This method is used to fetch the pg types, which are required
to map the data types that come back as the result of the query.
Args:
trans_id: unique transaction id
"""
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
# List of oid for which we need type name from pg_type
oid = ''
for col in session_obj['columns_info']:
type_obj = session_obj['columns_info'][col]
oid += str(type_obj['type_code']) + ','
# Remove extra comma
oid = oid[:-1]
status, res = conn.execute_dict(
"""SELECT oid, format_type(oid,null) as typname FROM pg_type WHERE oid IN ({0}) ORDER BY oid;
""".format(oid))
if status:
# iterate through pg_types and update the type name in session object
for record in res['rows']:
for col in session_obj['columns_info']:
type_obj = session_obj['columns_info'][col]
if type_obj['type_code'] == record['oid']:
type_obj['type_name'] = record['typname']
update_session_grid_transaction(trans_id, session_obj)
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
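# Illustrative sketch (not part of this commit): how the oid list above is
# assembled for a hypothetical columns_info holding two columns.
#
#   columns_info = {'id': {'type_code': 23, 'type_name': None},
#                   'name': {'type_code': 25, 'type_name': None}}
#   oid = ''
#   for col in columns_info:
#       oid += str(columns_info[col]['type_code']) + ','
#   oid = oid[:-1]  # e.g. "23,25"
#
# The statement then run against pg_type becomes:
#   SELECT oid, format_type(oid,null) as typname
#   FROM pg_type WHERE oid IN (23,25) ORDER BY oid;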
@blueprint.route('/save/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def save(trans_id):
"""
This method is used to save the changes to the server
Args:
trans_id: unique transaction id
"""
if request.data:
changed_data = json.loads(request.data.decode())
else:
changed_data = request.args or request.form
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
# If there is no primary key found then return from the function.
if len(session_obj['primary_keys']) <= 0 or len(changed_data) <= 0:
return make_json_response(
data={'status': False,
'result': gettext('No primary key found for this object, so unable to save records.')}
)
status, res, query_res = trans_obj.save(changed_data)
else:
status = False
res = error_msg
query_res = None
return make_json_response(data={'status': status, 'result': res, 'query_result': query_res})
@blueprint.route('/filter/get/<int:trans_id>', methods=["GET"])
@login_required
def get_filter(trans_id):
"""
This method is used to get the existing filter.
Args:
trans_id: unique transaction id
"""
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
res = trans_obj.get_filter()
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
@blueprint.route('/filter/apply/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def apply_filter(trans_id):
"""
This method is used to apply the filter.
Args:
trans_id: unique transaction id
"""
if request.data:
filter_sql = json.loads(request.data.decode())
else:
filter_sql = request.args or request.form
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
status, res = trans_obj.set_filter(filter_sql)
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
update_session_grid_transaction(trans_id, session_obj)
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
@blueprint.route('/filter/inclusive/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def append_filter_inclusive(trans_id):
"""
This method is used to append and apply the filter.
Args:
trans_id: unique transaction id
"""
if request.data:
filter_data = json.loads(request.data.decode())
else:
filter_data = request.args or request.form
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
res = None
filter_sql = ''
driver = get_driver(PG_DEFAULT_DRIVER)
for column_name in filter_data:
column_value = filter_data[column_name]
if column_value is None:
filter_sql = driver.qtIdent(conn, column_name) + ' IS NULL '
else:
filter_sql = driver.qtIdent(conn, column_name) + ' = ' + driver.qtLiteral(column_value)
trans_obj.append_filter(filter_sql)
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
update_session_grid_transaction(trans_id, session_obj)
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
@blueprint.route('/filter/exclusive/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def append_filter_exclusive(trans_id):
"""
This method is used to append and apply the filter.
Args:
trans_id: unique transaction id
"""
if request.data:
filter_data = json.loads(request.data.decode())
else:
filter_data = request.args or request.form
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
res = None
filter_sql = ''
driver = get_driver(PG_DEFAULT_DRIVER)
for column_name in filter_data:
column_value = filter_data[column_name]
if column_value is None:
filter_sql = driver.qtIdent(conn, column_name) + ' IS NOT NULL '
else:
filter_sql = driver.qtIdent(conn, column_name) + ' IS DISTINCT FROM ' + driver.qtLiteral(column_value)
# Call the append_filter method of transaction object
trans_obj.append_filter(filter_sql)
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
update_session_grid_transaction(trans_id, session_obj)
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
@blueprint.route('/filter/remove/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def remove_filter(trans_id):
"""
This method is used to remove the filter.
Args:
trans_id: unique transaction id
"""
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
res = None
# Call the remove_filter method of transaction object
trans_obj.remove_filter()
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
update_session_grid_transaction(trans_id, session_obj)
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
@blueprint.route('/limit/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def set_limit(trans_id):
"""
This method is used to set the limit for the SQL.
Args:
trans_id: unique transaction id
"""
if request.data:
limit = json.loads(request.data.decode())
else:
limit = request.args or request.form
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
res = None
# Call the set_limit method of transaction object
trans_obj.set_limit(limit)
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
update_session_grid_transaction(trans_id, session_obj)
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
@blueprint.route('/cancel/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def cancel_transaction(trans_id):
"""
This method is used to cancel the running transaction
Args:
trans_id: unique transaction id
"""
grid_data = session['gridData']
# Return from the function if transaction id not found
if str(trans_id) not in grid_data:
return make_json_response(
data={
'status': False, 'result': gettext('Transaction ID not found in the session.')
}
)
# Fetch the object for the specified transaction id.
# Use pickle.loads function to get the command object
session_obj = grid_data[str(trans_id)]
trans_obj = pickle.loads(session_obj['command_obj'])
if trans_obj is not None and session_obj is not None:
# Fetch the main connection object for the database.
try:
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid)
conn = manager.connection(did=trans_obj.did)
except Exception as e:
return internal_server_error(errormsg=str(e))
delete_connection = False
# Connect to the Server if not connected.
if not conn.connected():
status, msg = conn.connect()
if not status:
return internal_server_error(errormsg=str(msg))
delete_connection = True
if conn.connected():
# on successful connection cancel the running transaction
status, result = conn.cancel_transaction(trans_obj.conn_id, trans_obj.did)
# Delete connection if we have created it to
# cancel the transaction
if delete_connection:
manager.release(did=trans_obj.did)
else:
status = False
result = gettext('Not connected to server or connection with the server has been closed.')
else:
status = False
result = gettext('Either Transaction object or Session object not found.')
return make_json_response(
data={
'status': status, 'result': result
}
)
@blueprint.route('/object/get/<int:trans_id>', methods=["GET"])
@login_required
def get_object_name(trans_id):
"""
This method is used to get the object name
Args:
trans_id: unique transaction id
"""
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
res = trans_obj.object_name
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
@blueprint.route('/auto_commit/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def set_auto_commit(trans_id):
"""
This method is used to set the value for auto commit.
Args:
trans_id: unique transaction id
"""
if request.data:
auto_commit = json.loads(request.data.decode())
else:
auto_commit = request.args or request.form
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
res = None
# Call the set_auto_commit method of transaction object
trans_obj.set_auto_commit(auto_commit)
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
update_session_grid_transaction(trans_id, session_obj)
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
@blueprint.route('/auto_rollback/<int:trans_id>', methods=["PUT", "POST"])
@login_required
def set_auto_rollback(trans_id):
"""
This method is used to set the value for auto rollback.
Args:
trans_id: unique transaction id
"""
if request.data:
auto_rollback = json.loads(request.data.decode())
else:
auto_rollback = request.args or request.form
# Check the transaction and connection status
status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id)
if status and conn is not None \
and trans_obj is not None and session_obj is not None:
res = None
# Call the set_auto_rollback method of transaction object
trans_obj.set_auto_rollback(auto_rollback)
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
update_session_grid_transaction(trans_id, session_obj)
else:
status = False
res = error_msg
return make_json_response(data={'status': status, 'result': res})
@blueprint.route("/sqleditor.js")
@login_required
def script():
"""render the required javascript"""
return Response(response=render_template("sqleditor/js/sqleditor.js", _=gettext),
status=200,
mimetype="application/javascript")
def is_begin_required(query):
word_len = 0
query = query.strip()
query_len = len(query)
# Check word length (since "beginx" is not "begin").
while (word_len < query_len) and query[word_len].isalpha():
word_len += 1
# Transaction control commands. These should include every keyword that
# gives rise to a TransactionStmt in the backend grammar, except for the
# savepoint-related commands.
#
# (We assume that START must be START TRANSACTION, since there is
# presently no other "START foo" command.)
keyword = query[0:word_len]
if word_len == 5 and keyword.lower() == "abort":
return False
if word_len == 5 and keyword.lower() == "begin":
return False
if word_len == 5 and keyword.lower() == "start":
return False
if word_len == 6 and keyword.lower() == "commit":
return False
if word_len == 3 and keyword.lower() == "end":
return False
if word_len == 8 and keyword.lower() == "rollback":
return False
if word_len == 7 and keyword.lower() == "prepare":
# PREPARE TRANSACTION is a TC command, PREPARE foo is not
query = query[word_len:query_len]
query = query.strip()
query_len = len(query)
word_len = 0
while (word_len < query_len) and query[word_len].isalpha():
word_len += 1
keyword = query[0:word_len]
if word_len == 11 and keyword.lower() == "transaction":
return False
return True
# Commands not allowed within transactions. The statements checked for
# here should be exactly those that call PreventTransactionChain() in the
# backend.
if word_len == 6 and keyword.lower() == "vacuum":
return False
if word_len == 7 and keyword.lower() == "cluster":
# CLUSTER with any arguments is allowed in transactions
query = query[word_len:query_len]
query = query.strip()
if query[0].isalpha():
return True # has additional words
return False # it's CLUSTER without arguments
if word_len == 6 and keyword.lower() == "create":
query = query[word_len:query_len]
query = query.strip()
query_len = len(query)
word_len = 0
while (word_len < query_len) and query[word_len].isalpha():
word_len += 1
keyword = query[0:word_len]
if word_len == 8 and keyword.lower() == "database":
return False
if word_len == 10 and keyword.lower() == "tablespace":
return False
# CREATE [UNIQUE] INDEX CONCURRENTLY isn't allowed in xacts
# The optional UNIQUE keyword is skipped before checking for INDEX.
if word_len == 6 and keyword.lower() == "unique":
query = query[word_len:query_len]
query = query.strip()
query_len = len(query)
word_len = 0
while (word_len < query_len) and query[word_len].isalpha():
word_len += 1
keyword = query[0:word_len]
if word_len == 5 and keyword.lower() == "index":
query = query[word_len:query_len]
query = query.strip()
query_len = len(query)
word_len = 0
while (word_len < query_len) and query[word_len].isalpha():
word_len += 1
keyword = query[0:word_len]
if word_len == 12 and keyword.lower() == "concurrently":
return False
return True
if word_len == 5 and keyword.lower() == "alter":
query = query[word_len:query_len]
query = query.strip()
query_len = len(query)
word_len = 0
while (word_len < query_len) and query[word_len].isalpha():
word_len += 1
keyword = query[0:word_len]
# ALTER SYSTEM isn't allowed in xacts
if word_len == 6 and keyword.lower() == "system":
return False
return True
# Note: these tests will match DROP SYSTEM and REINDEX TABLESPACE, which
# aren't really valid commands so we don't care much. The other four
# possible matches are correct.
if word_len == 4 and keyword.lower() == "drop" \
or word_len == 7 and keyword.lower() == "reindex":
query = query[word_len:query_len]
query = query.strip()
query_len = len(query)
word_len = 0
while (word_len < query_len) and query[word_len].isalpha():
word_len += 1
keyword = query[0:word_len]
if word_len == 8 and keyword.lower() == "database":
return False
if word_len == 6 and keyword.lower() == "system":
return False
if word_len == 10 and keyword.lower() == "tablespace":
return False
return True
# DISCARD ALL isn't allowed in xacts, but other variants are allowed.
if word_len == 7 and keyword.lower() == "discard":
query = query[word_len:query_len]
query = query.strip()
query_len = len(query)
word_len = 0
while (word_len < query_len) and query[word_len].isalpha():
word_len += 1
keyword = query[0:word_len]
if word_len == 3 and keyword.lower() == "all":
return False
return True
return True
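
For reference, a few example inputs and the value the keyword scan above yields for them; this is illustrative only and the table names are hypothetical.

is_begin_required("BEGIN; SELECT 1")         # False - explicit transaction control
is_begin_required("VACUUM FULL pg_class")    # False - disallowed inside a transaction
is_begin_required("CREATE DATABASE sales")   # False - disallowed inside a transaction
is_begin_required("DISCARD ALL")             # False - DISCARD ALL is disallowed
is_begin_required("CLUSTER my_table")        # True  - CLUSTER with arguments is allowed
is_begin_required("SELECT * FROM pg_class")  # True  - plain statement, BEGIN is issued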

View File

@ -0,0 +1,638 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
""" Implemented classes for the different object type used by data grid """
from abc import ABCMeta, abstractmethod
from flask import render_template
from flask.ext.babel import gettext
from pgadmin.utils.driver import get_driver
from pgadmin.utils.ajax import forbidden
from config import PG_DEFAULT_DRIVER
import six
VIEW_FIRST_100_ROWS = 1
VIEW_LAST_100_ROWS = 2
VIEW_ALL_ROWS = 3
VIEW_FILTERED_ROWS = 4
class ObjectRegistry(ABCMeta):
"""
class ObjectRegistry(ABCMeta)
Every object will be registered automatically by its object type.
Class-level Methods:
----------- -------
* get_object(cls, name, **kwargs)
- This method returns the object based on the registered object type,
else raises a NotImplementedError.
"""
registry = dict()
def __init__(cls, name, bases, d):
"""
This method is used to register the objects based on object type.
"""
if d and 'object_type' in d:
ObjectRegistry.registry[d['object_type']] = cls
ABCMeta.__init__(cls, name, bases, d)
@classmethod
def get_object(cls, name, **kwargs):
"""
This method returns the object based on the registered object type,
else raises a NotImplementedError
Args:
name: object type for which object to be returned.
**kwargs: N number of parameters
"""
if name in ObjectRegistry.registry:
return (ObjectRegistry.registry[name])(**kwargs)
raise NotImplementedError(
gettext("This feature has not been implemented for object type '{0}'!").format(name)
)
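# Illustrative note (not part of this commit): because ObjectRegistry serves as
# the metaclass of BaseCommand below, any subclass defining an 'object_type'
# attribute registers itself automatically. For example (hypothetical ids):
#
#   class TableCommand(GridCommand):
#       object_type = 'table'  # added to ObjectRegistry.registry
#
#   cmd = ObjectRegistry.get_object('table', sid=1, did=2, obj_id=1234,
#                                   conn_id=None, cmd_type=VIEW_ALL_ROWS)
#
# which mirrors how initialize_datagrid() builds its command objects.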
@six.add_metaclass(ObjectRegistry)
class BaseCommand(object):
"""
class BaseCommand
It is a base class for SQL Tools like data grid and query tool.
Different SQL tools must implement this to expose the abstract methods.
Abstract Methods:
-------- -------
* get_sql()
- This method returns the proper SQL query for the object type.
* can_edit()
- This method returns True/False, specifying whether data is
editable or not.
* can_filter()
- This method returns True/False, specifying whether filter
will be applied on data or not.
"""
def __init__(self, **kwargs):
"""
This method is used to initialize the class and
save the server id and the database id.
Args:
**kwargs : N number of parameters
"""
# Save the server id and the database id
self.sid = kwargs['sid'] if 'sid' in kwargs else None
self.did = kwargs['did'] if 'did' in kwargs else None
@abstractmethod
def get_sql(self):
pass
@abstractmethod
def can_edit(self):
pass
@abstractmethod
def can_filter(self):
pass
class SQLFilter(object):
"""
class SQLFilter
Implementation of filter class for sql grid.
Class-level Methods:
----------- -------
* get_filter()
- This method returns the filter applied.
* set_filter(row_filter)
- This method sets the filter to be applied.
* append_filter(row_filter)
- This method is used to append the filter within existing filter
* remove_filter()
- This method removes the filter applied.
* validate_filter(row_filter)
- This method validates the given filter.
"""
def __init__(self, **kwargs):
"""
This method is used to initialize the class and
create a proper object name which will be used
to fetch the data using namespace name and object name.
Args:
**kwargs : N number of parameters
"""
# Save the server id, database id and object id
assert ('sid' in kwargs)
assert ('did' in kwargs)
assert ('obj_id' in kwargs)
self.sid = kwargs['sid']
self.did = kwargs['did']
self.obj_id = kwargs['obj_id']
self.__row_filter = kwargs['sql_filter'] if 'sql_filter' in kwargs else None
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(self.sid)
conn = manager.connection(did=self.did)
ver = manager.version
# we will set template path for sql scripts
if ver >= 90100:
self.sql_path = 'sqleditor/sql/9.1_plus'
if conn.connected():
# Fetch the Namespace Name and object Name
query = render_template("/".join([self.sql_path, 'objectname.sql']), obj_id=self.obj_id)
status, result = conn.execute_dict(query)
if not status:
raise Exception(result)
self.nsp_name = result['rows'][0]['nspname']
self.object_name = result['rows'][0]['relname']
else:
raise Exception(gettext('Not connected to server or connection with the server has been closed.'))
def get_filter(self):
"""
This function returns the filter.
"""
return self.__row_filter
def set_filter(self, row_filter):
"""
This function validates the filter and sets the
given filter to the member variable.
Args:
row_filter: sql query
"""
status, msg = self.validate_filter(row_filter)
if status:
self.__row_filter = row_filter
return status, msg
def is_filter_applied(self):
"""
This function returns True if filter is applied else False.
"""
if self.__row_filter is None or self.__row_filter == '':
return False
return True
def remove_filter(self):
"""
This function removes the filter by setting its value to None.
"""
self.__row_filter = None
def append_filter(self, row_filter):
"""
This function is used to get the existing filter and append
the given filter to it.
Args:
row_filter: sql query to append
"""
existing_filter = self.get_filter()
if existing_filter is None or existing_filter == '':
self.__row_filter = row_filter
else:
self.__row_filter = existing_filter + ' \n AND ' + row_filter
def validate_filter(self, row_filter):
"""
This function validates the given filter.
Args:
row_filter: sql syntax to validate
"""
status = True
result = None
if row_filter is None or row_filter == '':
return False, gettext('Filter string is empty!')
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(self.sid)
conn = manager.connection(did=self.did)
if conn.connected():
sql = render_template("/".join([self.sql_path, 'validate.sql']),
nsp_name=self.nsp_name, object_name=self.object_name, row_filter=row_filter)
status, result = conn.execute_scalar(sql)
if not status:
result = result.partition("\n")[0]
return status, result
class GridCommand(BaseCommand, SQLFilter):
"""
class GridCommand(BaseCommand, SQLFilter)
It is a base class for different object type used by data grid.
A derived object type must implement this to expose the abstract methods.
Class-level Methods:
----------- -------
* get_primary_keys()
- Derived classes can implement their own logic to get the primary keys.
* save()
- Derived classes can implement their own logic to save the data into the database.
* set_limit(limit)
- This method sets the limit for SQL query
* get_limit()
- This method returns the limit.
"""
def __init__(self, **kwargs):
"""
This method is used to call base class init to initialize
the data.
Args:
**kwargs : N number of parameters
"""
BaseCommand.__init__(self, **kwargs)
SQLFilter.__init__(self, **kwargs)
# Save the connection id, command type
self.conn_id = kwargs['conn_id'] if 'conn_id' in kwargs else None
self.cmd_type = kwargs['cmd_type'] if 'cmd_type' in kwargs else None
self.limit = -1
if self.cmd_type == VIEW_FIRST_100_ROWS or self.cmd_type == VIEW_LAST_100_ROWS:
self.limit = 100
def get_primary_keys(self):
return None, None
def save(self, changed_data):
return forbidden(errmsg=gettext("Not allowed to save the data for the selected object!"))
def get_limit(self):
"""
This function returns the limit for the SQL query.
"""
return self.limit
def set_limit(self, limit):
"""
This function sets the limit for the SQL query
Args:
limit: limit to be set for SQL.
"""
self.limit = limit
class TableCommand(GridCommand):
"""
class TableCommand(GridCommand)
It is a derived class for Table type.
"""
object_type = 'table'
def __init__(self, **kwargs):
"""
This method calls the __init__ method of the base class
to get the proper object name.
Args:
**kwargs : N number of parameters
"""
# call base class init to fetch the table name
super(TableCommand, self).__init__(**kwargs)
def get_sql(self):
"""
This method is used to create a proper SQL query
to fetch the data for the specified table
"""
# Fetch the primary keys for the table
pk_names, primary_keys = self.get_primary_keys()
sql_filter = self.get_filter()
if sql_filter is None:
sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name,
nsp_name=self.nsp_name, pk_names=pk_names, cmd_type=self.cmd_type,
limit=self.limit)
else:
sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name,
nsp_name=self.nsp_name, pk_names=pk_names, cmd_type=self.cmd_type,
sql_filter=sql_filter, limit=self.limit)
return sql
def get_primary_keys(self):
"""
This function is used to fetch the primary key columns.
"""
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(self.sid)
conn = manager.connection(did=self.did, conn_id=self.conn_id)
pk_names = ''
primary_keys = dict()
if conn.connected():
# Fetch the primary key column names
query = render_template("/".join([self.sql_path, 'primary_keys.sql']), obj_id=self.obj_id)
status, result = conn.execute_dict(query)
if not status:
raise Exception(result)
for row in result['rows']:
pk_names += row['attname'] + ','
primary_keys[row['attname']] = row['typname']
if pk_names != '':
# Remove last character from the string
pk_names = pk_names[:-1]
else:
raise Exception(gettext('Not connected to server or connection with the server has been closed.'))
return pk_names, primary_keys
def can_edit(self):
return True
def can_filter(self):
return True
def save(self, changed_data):
"""
This function is used to save the data into the database.
Depending on condition it will either update or insert the
new row into the database.
Args:
changed_data: Contains data to be saved
"""
manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(self.sid)
conn = manager.connection(did=self.did, conn_id=self.conn_id)
status = False
res = None
query_res = dict()
count = 0
if conn.connected():
# Start the transaction
conn.execute_void('BEGIN;')
# Iterate total number of records to be updated/inserted
for row in changed_data:
# if no data to be saved then continue
if 'data' not in row:
continue
                # If 'keys' is present in the row then it is an update (or
                # delete) query, otherwise it is an insert query.
if 'keys' in row:
# if 'marked_for_deletion' is present in row and it is true then delete
if 'marked_for_deletion' in row and row['marked_for_deletion']:
sql = render_template("/".join([self.sql_path, 'delete.sql']),
primary_keys=row['keys'], object_name=self.object_name,
nsp_name=self.nsp_name)
else:
sql = render_template("/".join([self.sql_path, 'update.sql']), object_name=self.object_name,
data_to_be_saved=row['data'], primary_keys=row['keys'],
nsp_name=self.nsp_name)
else:
sql = render_template("/".join([self.sql_path, 'create.sql']), object_name=self.object_name,
data_to_be_saved=row['data'], nsp_name=self.nsp_name)
status, res = conn.execute_void(sql)
rows_affected = conn.rows_affected()
# store the result of each query in dictionary
query_res[count] = {'status': status, 'result': res,
'sql': sql, 'rows_affected': rows_affected}
count += 1
if not status:
conn.execute_void('ROLLBACK;')
                    # If we rolled back the transaction then update the
                    # message for each SQL query.
for val in query_res:
if query_res[val]['status']:
query_res[val]['result'] = 'Transaction ROLLBACK'
return status, res, query_res
# Commit the transaction if there is no error found
conn.execute_void('COMMIT;')
return status, res, query_res
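# Illustrative sketch (not part of the original module) of the changed_data
# structure that TableCommand.save() above iterates over; any key names not
# referenced in the code are assumptions:
#
#   changed_data = [
#       {'data': {'id': '42', 'name': 'New row'}},                      # INSERT
#       {'keys': {'id': '7'}, 'data': {'name': 'Renamed'}},             # UPDATE
#       {'keys': {'id': '9'}, 'data': {}, 'marked_for_deletion': True}  # DELETE
#   ]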
class ViewCommand(GridCommand):
"""
class ViewCommand(GridCommand)
It is a derived class for View type.
"""
object_type = 'view'
def __init__(self, **kwargs):
"""
This method calls the __init__ method of the base class
to get the proper object name.
Args:
**kwargs : N number of parameters
"""
        # call base class init to fetch the view name
super(ViewCommand, self).__init__(**kwargs)
def get_sql(self):
"""
This method is used to create a proper SQL query
to fetch the data for the specified view
"""
sql_filter = self.get_filter()
if sql_filter is None:
sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name,
nsp_name=self.nsp_name, cmd_type=self.cmd_type,
limit=self.limit)
else:
sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name,
nsp_name=self.nsp_name, cmd_type=self.cmd_type,
sql_filter=sql_filter, limit=self.limit)
return sql
def can_edit(self):
return False
def can_filter(self):
return True
class ForeignTableCommand(GridCommand):
"""
class ForeignTableCommand(GridCommand)
It is a derived class for ForeignTable type.
"""
object_type = 'foreign-table'
def __init__(self, **kwargs):
"""
This method calls the __init__ method of the base class
to get the proper object name.
Args:
**kwargs : N number of parameters
"""
        # call base class init to fetch the foreign table name
super(ForeignTableCommand, self).__init__(**kwargs)
def get_sql(self):
"""
This method is used to create a proper SQL query
to fetch the data for the specified foreign table
"""
sql_filter = self.get_filter()
if sql_filter is None:
sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name,
nsp_name=self.nsp_name, cmd_type=self.cmd_type,
limit=self.limit)
else:
sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name,
nsp_name=self.nsp_name, cmd_type=self.cmd_type,
sql_filter=sql_filter, limit=self.limit)
return sql
def can_edit(self):
return False
def can_filter(self):
return True
class CatalogCommand(GridCommand):
"""
class CatalogCommand(GridCommand)
It is a derived class for CatalogObject type.
"""
object_type = 'catalog_object'
def __init__(self, **kwargs):
"""
This method calls the __init__ method of the base class
to get the proper object name.
Args:
**kwargs : N number of parameters
"""
        # call base class init to fetch the catalog object name
super(CatalogCommand, self).__init__(**kwargs)
def get_sql(self):
"""
This method is used to create a proper SQL query
to fetch the data for the specified catalog object
"""
sql_filter = self.get_filter()
if sql_filter is None:
sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name,
nsp_name=self.nsp_name, cmd_type=self.cmd_type,
limit=self.limit)
else:
sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name,
nsp_name=self.nsp_name, cmd_type=self.cmd_type,
sql_filter=sql_filter, limit=self.limit)
return sql
def can_edit(self):
return False
def can_filter(self):
return True
class QueryToolCommand(BaseCommand):
"""
class QueryToolCommand(BaseCommand)
It is a derived class for Query Tool.
"""
object_type = 'query_tool'
def __init__(self, **kwargs):
        # call base class init to initialize the Query Tool command
super(QueryToolCommand, self).__init__(**kwargs)
self.conn_id = None
self.auto_rollback = False
self.auto_commit = True
def get_sql(self):
return None
def can_edit(self):
return False
def can_filter(self):
return False
def set_connection_id(self, conn_id):
self.conn_id = conn_id
def set_auto_rollback(self, auto_rollback):
self.auto_rollback = auto_rollback
def set_auto_commit(self, auto_commit):
self.auto_commit = auto_commit
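# A small usage sketch (not part of the original module); the constructor
# keyword arguments are assumptions about what BaseCommand expects:
#
#   qt_cmd = QueryToolCommand(sid=1, did=1)
#   qt_cmd.set_connection_id('67890')  # attach the async connection id later
#   qt_cmd.set_auto_commit(False)      # let the user manage the transaction
#   qt_cmd.set_auto_rollback(True)     # roll back automatically on error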

View File

@ -0,0 +1,248 @@
#main-editor_panel {
height: 100%;
width: 100%;
}
.sql-editor {
position: absolute;
left: 0;
right: 0;
top : 0;
bottom: 0;
}
.sql-editor-busy-fetching {
position:absolute;
left: 0;
top: 41px;
bottom: 0;
right: 0;
margin:0;
padding: 0;
background: black;
opacity: 0.4;
z-index: 100;
}
.sql-editor-busy-icon {
position:absolute;
left: 45%;
top: 40%;
}
.sql-editor-busy-text {
position:absolute;
left: 42%;
top: 50%;
font-size: 20px;
}
#editor-panel {
position: absolute;
left: 0;
right: 0;
top : 65px;
bottom: 0;
z-index: 0;
}
.editor-title {
background-color: #2C76B4;
padding: 2px;
color: white;
font-size: 13px;
}
.sql-editor-btn-group {
background-color: #D2D2D2;
border: 2px solid #A9A9A9;
left: 0px;
right: 0px;
padding: 2px;
}
.sql-editor-btn-group button {
padding: 5px;
}
.sql-editor-btn-group button.dropdown-toggle {
padding-left: 5px !important;
padding-right: 5px !important;
}
.sql-editor-btn-group .dropdown-menu {
min-width: initial;
}
.sql-editor-btn-group .backgrid-filter.form-search {
position: relative;
width: 248px;
height: 30px;
float: right;
margin-top: 2px;
margin-right: 10px;
}
#output-panel .wcDocker {
top: 0px;
bottom: 0px;
height: auto;
}
#output-panel .wcFrameCenter {
overflow: hidden;
}
.sql-editor-grid-container {
height: calc(100% - 45px);
overflow: auto;
}
#datagrid-paginator {
bottom: 0px;
width: 100%;
background-color: white;
}
/* Move the original checkbox out of the way */
#datagrid .select-row-cell .sqleditor-checkbox {
position: absolute;
left: -9999px;
}
/* Align the icon and the label.deletable text to the same height using table-cell display */
/* If you change the font-size of the text, you may also want to make some padding or alignment changes here */
#datagrid .sqleditor-checkbox ~ label.deletable > span {
display: table-cell;
vertical-align: middle;
padding-left: 5px;
}
/* The label.deletable contains the icon and the text, and will grab the focus */
#datagrid .select-row-cell .sqleditor-checkbox + label.deletable {
cursor: pointer;
display: table;
}
/*The icon container, set it to fixed size and font size, the padding is to align the border*/
/*If you change the font-size of this icon, be sure to adjust the min-width as well*/
#datagrid .select-row-cell .sqleditor-checkbox + label.deletable:before {
font-family: 'FontAwesome';
font-size: small;
font-weight: normal;
display: inline-block;
min-width: 28px;
}
/* Toggle the Font Awesome icon */
#datagrid .select-row-cell .sqleditor-checkbox:checked + label:before {
content: "\f014";
}
#datagrid .select-row-cell .sqleditor-checkbox:not(:checked) + label:before {
content: "\f014";
}
/*Do something on focus, in this case show dashed border*/
#datagrid .select-row-cell .sqleditor-checkbox:focus + label:before {
border: 1px dashed #777;
}
/*Do something on hover, in this case change the image color*/
#datagrid .select-row-cell .sqleditor-checkbox:hover + label:before {
color: #67afe5;
}
.pgadmin-row-deleted td {
color: red !important;
text-decoration: line-through;
}
.filter-container {
position: relative;
background-color: white;
border: 1px solid black;
box-shadow: 0.5px 0.5px 5px #000;
padding-bottom: 30px;
top: 10px;
z-index: 1;
margin: auto;
width: 60%;
}
.filter-container .CodeMirror-scroll {
min-height: 120px;
max-height: 120px;
}
.filter-container .sql-textarea{
box-shadow: 0.1px 0.1px 3px #000;
margin-bottom: 5px;
}
.filter-title {
background-color: #2C76B4;
padding: 2px;
color: white;
font-size: 13px;
}
#filter .btn-group {
margin-right: 2px;
float: right;
}
#filter .btn-group > button {
padding: 3px;
}
#filter .btn-group .btn-primary {
margin: auto !important;
}
.has-select-all table thead tr th:nth-child(1),
.has-select-all table tbody tr td:nth-child(1) {
width: 35px !important;
max-width: 35px !important;
min-width: 35px !important;
}
.sql-editor-message {
white-space:pre-wrap;
font-family: monospace;
padding-top: 5px;
padding-left: 10px;
}
.limit-enabled {
background-color: white;
}
.sql-editor-history-container {
height: 100%;
overflow: auto;
}
.sql-status-cell {
max-width: 30px;
}
.btn-circle {
width: 20px;
height: 20px;
text-align: center;
padding: 0;
font-size: 10px;
line-height: 1.428571429;
border-radius: 10px;
}
.visibility-hidden {
visibility: hidden;
}
.sql-editor-mark {
border-bottom: 2px dotted red;
}
#editor-panel .CodeMirror-activeline-background {
background: #5B9CEF;
color: white;
}

File diff suppressed because it is too large

View File

@ -0,0 +1,8 @@
{# Insert a new row with the given column values (specified in data_to_be_saved) #}
INSERT INTO {{ conn|qtIdent(nsp_name, object_name) }} (
{% for col in data_to_be_saved %}
{% if not loop.first %}, {% endif %}{{ conn|qtIdent(col) }}{% endfor %}
) VALUES (
{% for col in data_to_be_saved %}
{% if not loop.first %}, {% endif %}{{ data_to_be_saved[col]|qtLiteral }}{% endfor %}
);
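{# A hedged, illustrative example (not part of the original template): with
   nsp_name='public', object_name='employees' and
   data_to_be_saved={'id': '1', 'name': 'Alice'} this renders to (roughly):
   INSERT INTO public.employees (id, name) VALUES ('1', 'Alice');
#}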

View File

@ -0,0 +1,4 @@
{# Delete the row with primary keys (specified in primary_keys) #}
DELETE FROM {{ conn|qtIdent(nsp_name, object_name) }} WHERE
{% for pk in primary_keys %}
{% if not loop.first %} AND {% endif %}{{ conn|qtIdent(pk) }} = {{ primary_keys[pk]|qtLiteral }}{% endfor %};
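{# A hedged, illustrative example (not part of the original template): with
   nsp_name='public', object_name='employees' and primary_keys={'id': '1'}
   this renders to (roughly):
   DELETE FROM public.employees WHERE id = '1';
#}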

View File

@ -0,0 +1,7 @@
{# ============= Fetch the schema and object name for given object id ============= #}
{% if obj_id %}
SELECT n.nspname, r.relname
FROM pg_class r
LEFT JOIN pg_namespace n ON (r.relnamespace = n.oid)
WHERE r.oid = {{obj_id}};
{% endif %}

View File

@ -0,0 +1,12 @@
{# SQL query to fetch data for the selected object, with optional filter, ordering and limit #}
SELECT * FROM {{ conn|qtIdent(nsp_name, object_name) }}
{% if sql_filter %}
WHERE {{ sql_filter }}
{% endif %}
{% if pk_names %}
ORDER BY {{ pk_names }}
{% if cmd_type == 1 or cmd_type == 3 %}ASC {% elif cmd_type == 2 %}DESC {% endif %}
{% endif %}
{% if limit > 0 %}
LIMIT {{ limit }}
{% endif %}
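{# A hedged, illustrative example (not part of the original template): with
   nsp_name='public', object_name='employees', sql_filter="name = 'Alice'",
   pk_names='id', cmd_type=1 (ascending per the condition above) and limit=100
   this renders to (roughly):
   SELECT * FROM public.employees
   WHERE name = 'Alice'
   ORDER BY id ASC
   LIMIT 100
#}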

View File

@ -0,0 +1,8 @@
{# ============= Fetch the primary keys for given object id ============= #}
{% if obj_id %}
SELECT at.attname, ty.typname
FROM pg_attribute at LEFT JOIN pg_type ty ON (ty.oid = at.atttypid)
WHERE attrelid={{obj_id}}::oid AND attnum = ANY (
(SELECT con.conkey FROM pg_class rel LEFT OUTER JOIN pg_constraint con ON con.conrelid=rel.oid
AND con.contype='p' WHERE rel.relkind IN ('r','s','t') AND rel.oid = {{obj_id}}::oid)::integer[])
{% endif %}

View File

@ -0,0 +1,7 @@
{# Update the row with primary keys (specified in primary_keys) #}
UPDATE {{ conn|qtIdent(nsp_name, object_name) }} SET
{% for col in data_to_be_saved %}
{% if not loop.first %}, {% endif %}{{ conn|qtIdent(col) }} = {{ data_to_be_saved[col]|qtLiteral }}{% endfor %}
WHERE
{% for pk in primary_keys %}
{% if not loop.first %} AND {% endif %}{{ conn|qtIdent(pk) }} = {{ primary_keys[pk]|qtLiteral }}{% endfor %};
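{# A hedged, illustrative example (not part of the original template): with
   nsp_name='public', object_name='employees', data_to_be_saved={'name': 'Bob'}
   and primary_keys={'id': '1'} this renders to (roughly):
   UPDATE public.employees SET name = 'Bob' WHERE id = '1';
#}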

View File

@ -0,0 +1,4 @@
{# Validate the row filter by running EXPLAIN on a SELECT that uses it #}
{% if row_filter %}
EXPLAIN SELECT * FROM {{ conn|qtIdent(nsp_name, object_name) }} WHERE {{ row_filter }}
{% endif %}