Mirror of https://github.com/pgadmin-org/pgadmin4.git, synced 2025-02-25 18:55:31 -06:00
Sorting the data during tree data loading using the natural sort algorithm.

This patch takes care of:

* Consistent behaviour during create and update operations on any node. Both should return the node data when creating a new object or updating an existing one.
* Now that we have consistent behaviour during these operations, we can consistently move and update the tree item based on the node data returned from the server.
* Implemented a 'node' method for each node type to fetch the information about that particular node only.
* Used the above changes to implement the 'refresh' operation on a tree node properly.

I must thank Surinder and Harshal for helping me implement the 'node' method, and for changing the behaviour of the create & update methods to return the node data for most of the nodes.

Fixes #1244
This commit is contained in:
parent 2c6f6609bc
commit b6f307256b
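The tree entries are now ordered with a natural sort rather than a plain lexicographic sort; the bundled javve/natural-sort library (added to the library list in the first hunk below) does this on the client. As a rough, illustrative sketch of the idea only (Python for brevity; this is not the shipped JavaScript implementation):

```python
# Illustrative sketch of natural ordering; pgAdmin actually uses the bundled
# javve/natural-sort JavaScript library for this, not the code below.
import re

def natural_key(label):
    # Split the label into digit and non-digit runs so numeric parts compare
    # as numbers: "table2" sorts before "table10".
    return [int(part) if part.isdigit() else part.lower()
            for part in re.split(r'(\d+)', label)]

print(sorted(["table10", "table2", "table1"], key=natural_key))
# -> ['table1', 'table2', 'table10']
```

A plain string sort would put "table10" between "table1" and "table2"; the natural key keeps numeric runs in numeric order, which is what the browser tree now shows.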
@@ -30,3 +30,4 @@ backgrid-select-all 1a00053 MIT https://github.com/wyuenho/backgrid
dropzone 4e20bd4 MIT https://github.com/enyo/dropzone
Filemanager 7e060c2 MIT https://github.com/simogeo/Filemanager
Unit (Length) d8e6237 MIT https://github.com/heygrady/Units/blob/master/Length.min.js
Natural Sort 9565816 MIT https://github.com/javve/natural-sort/blob/master/index.js

@@ -86,7 +86,7 @@ class CollectionNodeModule(PgAdminModule, PGChildModule):
"_id": parent_id,
"_pid": parent_id,
"module": 'pgadmin.node.%s' % self.node_type,
"subnodes": sorted([m.node_type for m in self.submodules])
"nodes": [self.node_type]
}

for key in kwargs:
@@ -17,7 +17,7 @@ from flask_babel import gettext
from flask_security import current_user
from pgadmin.browser import BrowserPluginModule
from pgadmin.browser.utils import NodeView
from pgadmin.utils.ajax import make_json_response, \
from pgadmin.utils.ajax import make_json_response, gone, \
make_response as ajax_response
from pgadmin.utils.menu import MenuItem

@@ -131,7 +131,7 @@ class ServerGroupView(NodeView):

if sg is None:
return make_json_response(
status=417,
status=410,
success=0,
errormsg=gettext(
'The specified server group could not be found.'
@@ -178,7 +178,17 @@ class ServerGroupView(NodeView):
status=410, success=0, errormsg=e.message
)

return make_json_response(result=request.form)
return jsonify(
node=self.blueprint.generate_browser_node(
gid,
None,
servergroup.name,
"icon-%s" % self.node_type,
True,
self.node_type,
can_delete=True # This is user created hence can deleted
)
)

def properties(self, gid):
"""Update the server-group properties"""
@@ -190,7 +200,7 @@ class ServerGroupView(NodeView):

if sg is None:
return make_json_response(
status=417,
status=410,
success=0,
errormsg=gettext(
'The specified server group could not be found.'
@@ -232,7 +242,7 @@ class ServerGroupView(NodeView):

return jsonify(
node=self.blueprint.generate_browser_node(
"%d" % (sg.id), None,
"%d" % (sg.id),None,
sg.name,
"icon-%s" % self.node_type,
True,
@@ -283,19 +293,29 @@ class ServerGroupView(NodeView):

if gid is None:
groups = ServerGroup.query.filter_by(user_id=current_user.id)
else:
groups = ServerGroup.query.filter_by(user_id=current_user.id,
id=gid).first()

for group in groups:
nodes.append(
self.blueprint.generate_browser_node(
"%d" % (group.id), None,
group.name,
"icon-%s" % self.node_type,
True,
self.node_type
for group in groups:
nodes.append(
self.blueprint.generate_browser_node(
"%d" % (group.id), None,
group.name,
"icon-%s" % self.node_type,
True,
self.node_type
)
)
else:
group = ServerGroup.query.filter_by(user_id=current_user.id,
id=gid).first()
if not group:
return gone(errormsg="Couldn't find the server-group!")

nodes = self.blueprint.generate_browser_node(
"%d" % (group.id), None,
group.name,
"icon-%s" % self.node_type,
True,
self.node_type
)

return make_json_response(data=nodes)
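The hunks above and below converge on one contract: create, update, nodes() and the new node() methods all answer with a browser-node payload built by generate_browser_node(), so the client can move, relabel or refresh a single tree item from the server's response. A hedged sketch of that payload follows; only "_id" and "_pid" appear in the hunks above, and the remaining key names are assumptions, not the actual pgAdmin schema.

```python
# Sketch only. The argument order mirrors the calls shown in these hunks
# (id, parent id, label, icon, inode flag, node type, extra kwargs); the
# "label"/"inode"/"_type" key names are assumptions, not pgAdmin's schema.
def generate_browser_node(node_id, parent_id, label, icon, inode, node_type,
                          **kwargs):
    node = {
        "_id": node_id,      # as in the CollectionNodeModule hunk above
        "_pid": parent_id,   # as in the CollectionNodeModule hunk above
        "_type": node_type,  # assumed key name
        "label": label,      # assumed key name
        "icon": icon,
        "inode": inode,      # True when the node can have children
    }
    node.update(kwargs)      # e.g. can_delete=True on the create path
    return node

# nodes() returns a list of these; node() returns exactly one, which the
# tree uses to update the matching item on refresh.
```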
@@ -268,6 +268,12 @@ class ServerNode(PGChildNodeView):
wal_pause=wal_paused
)
)

if not len(res):
return gone(errormsg=gettext(
'The specified server group with id# {0} could not be found.'
))

return make_json_response(result=res)

def node(self, gid, sid):
@@ -282,7 +288,7 @@ class ServerNode(PGChildNodeView):
success=0,
errormsg=gettext(
gettext(
"Couldn't find the server with id# %s!"
"Couldn't find the server with id# {0}!"
).format(sid)
)
)
@@ -336,6 +342,7 @@ class ServerNode(PGChildNodeView):
# TODO:: A server, which is connected, can not be deleted
if servers is None:
return make_json_response(
status=410,
success=0,
errormsg=gettext(
'The specified server could not be found.\n'
@@ -364,6 +371,7 @@ class ServerNode(PGChildNodeView):

if server is None:
return make_json_response(
status=410,
success=0,
errormsg=gettext("Could not find the required server.")
)
@@ -439,9 +447,8 @@ class ServerNode(PGChildNodeView):
if not conn.connected():
manager.update(server)

return make_json_response(
success=1,
data=self.blueprint.generate_browser_node(
return jsonify(
node=self.blueprint.generate_browser_node(
"%d" % (server.id), server.servergroup_id,
server.name,
"icon-server-not-connected" if not connected else
@@ -502,6 +509,7 @@ class ServerNode(PGChildNodeView):

if server is None:
return make_json_response(
status=410,
success=0,
errormsg=gettext("Could not find the required server.")
)
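The make_json_response(status=410, ...) blocks above are the pattern that the newly imported gone() helper replaces in the other views touched by this commit. A minimal sketch of what such a helper amounts to, assuming it simply wraps an HTTP 410 JSON response (this is not the actual pgadmin.utils.ajax implementation):

```python
# Assumed shape, inferred from the status=410 responses it replaces; not the
# real pgadmin.utils.ajax.gone() code.
import json

def gone(errormsg=''):
    payload = {'success': 0, 'errormsg': errormsg, 'info': '', 'result': None}
    # Returned as a (body, status, headers) tuple, which Flask turns into a
    # 410 "Gone" response.
    return json.dumps(payload), 410, {'Content-Type': 'application/json'}
```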
@@ -127,6 +127,9 @@ class DatabaseView(PGChildNodeView):
def wrapped(self, *args, **kwargs):

self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(kwargs['sid'])
if self.manager is None:
return gone(errormsg="Couldn't find the server.")

if action and action in ["drop"]:
self.conn = self.manager.connection()
elif 'did' in kwargs:
@@ -220,6 +223,7 @@ class DatabaseView(PGChildNodeView):
@check_precondition(action="nodes")
def nodes(self, gid, sid):
res = self.get_nodes(gid, sid)

return make_json_response(
data=res,
status=200
@@ -278,6 +282,11 @@ class DatabaseView(PGChildNodeView):
)
status, res = self.conn.execute_dict(SQL)

if len(res['rows']) == 0:
return gone(
_("Couldnot find the database on the server.")
)

if not status:
return internal_server_error(errormsg=res)

@@ -525,7 +534,6 @@ class DatabaseView(PGChildNodeView):
data = request.form if request.form else json.loads(
request.data, encoding='utf-8'
)
info = "nothing to update."

if did is not None:
# Fetch the name of database for comparison
@@ -540,7 +548,7 @@ class DatabaseView(PGChildNodeView):

if len(rset['rows']) == 0:
return gone(
_("Couldnot find the database on the server.")
_("Could not find the database on the server.")
)

data['old_name'] = (rset['rows'][0])['name']
@@ -557,8 +565,6 @@ class DatabaseView(PGChildNodeView):
if not status:
return internal_server_error(errormsg=msg)

info = "Database updated."

self.conn = self.manager.connection(database=data['name'], auto_reconnect=True)
status, errmsg = self.conn.connect()
@@ -569,16 +575,13 @@ class DatabaseView(PGChildNodeView):
if not status:
return internal_server_error(errormsg=msg)

info = "Database updated."

return make_json_response(
success=1,
info=info,
data={
'id': did,
'sid': sid,
'gid': gid,
}
return jsonify(
node=self.blueprint.generate_browser_node(
did,
sid,
data['name'],
"pg-icon-{0}".format(self.node_type)
)
)

@check_precondition(action="drop")
@@ -595,6 +598,7 @@ class DatabaseView(PGChildNodeView):

if res is None:
return make_json_response(
status=410,
success=0,
errormsg=_(
'Error: Object not found.'
@@ -661,7 +665,7 @@ class DatabaseView(PGChildNodeView):
return False, internal_server_error(errormsg=rset)

if len(rset['rows']) == 0:
return False, gone(
return gone(
_("Could not find the database on the server.")
)
@@ -278,7 +278,9 @@ class CastView(PGChildNodeView):

@check_precondition
def node(self, gid, sid, did, cid):
res = []
"""
This function will fetch properties of the cast node
"""
sql = render_template(
"/".join([self.template_path, 'nodes.sql']),
cid=cid
@@ -288,18 +290,17 @@ class CastView(PGChildNodeView):
return internal_server_error(errormsg=rset)

for row in rset['rows']:
res.append(
self.blueprint.generate_browser_node(
return make_json_response(
data=self.blueprint.generate_browser_node(
row['oid'],
did,
row['name'],
icon="icon-fts_template"
))
),
status=200
)

return make_json_response(
data=res,
status=200
)
return gone(errormsg=gettext("Could not find the specified cast."))

@check_precondition
def properties(self, gid, sid, did, cid):
@@ -325,6 +326,11 @@ class CastView(PGChildNodeView):
if not status:
return internal_server_error(errormsg=res)

if len(res['rows']) == 0:
return gone(
gettext("Could not find the cast information.")
)

return ajax_response(
response=res['rows'][0],
status=200
@@ -405,34 +411,20 @@ class CastView(PGChildNodeView):
data = request.form if request.form else json.loads(
request.data, encoding='utf-8'
)
sql = self.get_sql(gid, sid, did, data, cid)
try:
if sql and sql.strip('\n') and sql.strip(' '):
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
sql, name = self.get_sql(gid, sid, did, data, cid)
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)

return make_json_response(
success=1,
info="Cast updated",
data={
'id': cid,
'sid': sid,
'gid': gid,
'did': did
}
)
else:
return make_json_response(
success=1,
info="Nothing to update",
data={
'id': cid,
'sid': sid,
'gid': gid,
'did': did
}
return jsonify(
node=self.blueprint.generate_browser_node(
cid,
did,
name,
"icon-{0}".format(self.node_type)
)
)

except Exception as e:
return internal_server_error(errormsg=str(e))
@ -464,6 +456,7 @@ class CastView(PGChildNodeView):
|
||||
|
||||
if not res['rows']:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
@ -509,17 +502,18 @@ class CastView(PGChildNodeView):
|
||||
:return:
|
||||
"""
|
||||
data = request.args
|
||||
sql = self.get_sql(gid, sid, did, data, cid)
|
||||
if isinstance(sql, str) and sql and sql.strip('\n') and sql.strip(' '):
|
||||
try:
|
||||
sql, name = self.get_sql(gid, sid, did, data, cid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
if sql == '':
|
||||
sql = "--modified SQL"
|
||||
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
data="--modified SQL",
|
||||
status=200
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, did, data, cid=None):
|
||||
"""
|
||||
@ -531,35 +525,32 @@ class CastView(PGChildNodeView):
|
||||
:param data: model data
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
if cid is not None:
|
||||
last_system_oid = 0 if self.blueprint.show_system_objects else \
|
||||
(self.manager.db_info[did])['datlastsysoid'] \
|
||||
if self.manager.db_info is not none and \
|
||||
did in self.manager.db_info else 0
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
cid=cid,
|
||||
datlastsysoid=last_system_oid,
|
||||
showsysobj=self.blueprint.show_system_objects)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if cid is not None:
|
||||
last_system_oid = 0 if self.blueprint.show_system_objects else \
|
||||
(self.manager.db_info[did])['datlastsysoid'] \
|
||||
if self.manager.db_info is not None and \
|
||||
did in self.manager.db_info else 0
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
cid=cid,
|
||||
datlastsysoid=last_system_oid,
|
||||
showsysobj=self.blueprint.show_system_objects)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data
|
||||
)
|
||||
old_data = res['rows'][0]
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data
|
||||
)
|
||||
return str(sql), data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
if 'srctyp' in data and 'trgtyp' in data:
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']), data=data, conn=self.conn)
|
||||
else:
|
||||
if 'srctyp' in data and 'trgtyp' in data:
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']), data=data, conn=self.conn)
|
||||
else:
|
||||
sql = "-- incomplete definition"
|
||||
return str(sql)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
return "-- incomplete definition", None
|
||||
return str(sql), data['srctyp'] + "->" + data["trgtyp"]
|
||||
|
||||
@check_precondition
|
||||
def get_functions(self, gid, sid, did, cid=None):
|
||||
|
@ -19,6 +19,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@ -258,6 +259,39 @@ class EventTriggerView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, etid):
|
||||
"""
|
||||
This function will fetch properties of trigger node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
etid: Event trigger ID
|
||||
|
||||
Returns:
|
||||
Json object of trigger node
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path, 'nodes.sql']),
|
||||
etid=etid)
|
||||
status, res = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for row in res['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
row['name'],
|
||||
icon="icon-%s" % self.node_type
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified event trigger."))
|
||||
|
||||
def _formatter(self, result):
|
||||
"""
|
||||
This function is ued to parse security lables
|
||||
@ -294,6 +328,11 @@ class EventTriggerView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the event trigger information.")
|
||||
)
|
||||
|
||||
result = res['rows'][0]
|
||||
result = self._formatter(result)
|
||||
|
||||
@ -448,6 +487,7 @@ class EventTriggerView(PGChildNodeView):
|
||||
|
||||
if name is None:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
@ -500,7 +540,8 @@ class EventTriggerView(PGChildNodeView):
|
||||
try:
|
||||
sql = self.get_sql(data, etid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
|
||||
if sql == '':
|
||||
sql = "--modified SQL"
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
status=200
|
||||
@ -529,6 +570,12 @@ class EventTriggerView(PGChildNodeView):
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the event trigger information.")
|
||||
)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
old_data = self._formatter(old_data)
|
||||
|
||||
@ -550,7 +597,7 @@ class EventTriggerView(PGChildNodeView):
|
||||
err.append(required_args.get(arg, arg))
|
||||
if err:
|
||||
return make_json_response(
|
||||
status=400,
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
"Could not find the required parameter %s." % err
|
||||
|
@ -1,3 +1,6 @@
|
||||
SELECT e.oid, e.evtname AS name
|
||||
FROM pg_event_trigger e
|
||||
{% if etid %}
|
||||
WHERE e.oid={{etid}}::int
|
||||
{% endif %}
|
||||
ORDER BY e.evtname
|
||||
|
@ -20,6 +20,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, \
|
||||
make_response as ajax_response, internal_server_error
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@ -191,6 +192,30 @@ class ExtensionView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, eid):
|
||||
"""
|
||||
This function will fetch the properties of extension
|
||||
"""
|
||||
SQL = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
eid=eid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['eid'],
|
||||
did,
|
||||
row['name'],
|
||||
'icon-extension'
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified event trigger."))
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, eid):
|
||||
"""
|
||||
@ -202,6 +227,11 @@ class ExtensionView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the extension information.")
|
||||
)
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
@ -268,35 +298,22 @@ class ExtensionView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
SQL = self.getSQL(gid, sid, data, did, eid)
|
||||
|
||||
try:
|
||||
if SQL and isinstance(SQL, basestring) and \
|
||||
SQL.strip('\n') and SQL.strip(' '):
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
SQL, name = self.getSQL(gid, sid, data, did, eid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Extension updated",
|
||||
data={
|
||||
'id': eid,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
eid,
|
||||
did,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': did,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
)
|
||||
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@ -316,6 +333,7 @@ class ExtensionView(PGChildNodeView):
|
||||
|
||||
if name is None:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
@ -352,18 +370,18 @@ class ExtensionView(PGChildNodeView):
|
||||
This function returns modified SQL
|
||||
"""
|
||||
data = request.args.copy()
|
||||
SQL = self.getSQL(gid, sid, data, did, eid)
|
||||
if SQL and isinstance(SQL, basestring) and SQL.strip('\n') \
|
||||
and SQL.strip(' '):
|
||||
try:
|
||||
SQL, name = self.getSQL(gid, sid, data, did, eid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
|
||||
return make_json_response(
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
data=gettext('-- Modified SQL --'),
|
||||
status=200
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def getSQL(self, gid, sid, data, did, eid=None):
|
||||
"""
|
||||
@ -372,28 +390,33 @@ class ExtensionView(PGChildNodeView):
|
||||
required_args = [
|
||||
'name'
|
||||
]
|
||||
try:
|
||||
if eid is not None:
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'properties.sql']
|
||||
), eid=eid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
old_data = res['rows'][0]
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'update.sql']
|
||||
), data=data, o_data=old_data)
|
||||
else:
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'create.sql']
|
||||
), data=data)
|
||||
return SQL
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
if eid is not None:
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'properties.sql']
|
||||
), eid=eid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the extension information.")
|
||||
)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'update.sql']
|
||||
), data=data, o_data=old_data)
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
SQL = render_template("/".join(
|
||||
[self.template_path, 'create.sql']
|
||||
), data=data)
|
||||
return SQL, data['name']
|
||||
|
||||
@check_precondition
|
||||
def avails(self, gid, sid, did):
|
||||
|
@ -22,6 +22,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@ -276,6 +277,36 @@ class ForeignDataWrapperView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, fid):
|
||||
"""
|
||||
This function will fetch properties of foreign data wrapper node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
fid: Foreign data wrapper ID
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
conn=self.conn, fid=fid)
|
||||
status, r_set = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=r_set)
|
||||
|
||||
for row in r_set['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['fdwoid'],
|
||||
did,
|
||||
row['name'],
|
||||
icon="icon-foreign_data_wrapper"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified foreign data wrapper."))
|
||||
|
||||
def tokenize_options(self, option_value):
|
||||
"""
|
||||
This function will tokenize the string stored in database
|
||||
@ -311,6 +342,11 @@ class ForeignDataWrapperView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the foreign data wrapper information.")
|
||||
)
|
||||
|
||||
if res['rows'][0]['fdwoptions'] is not None:
|
||||
res['rows'][0]['fdwoptions'] = self.tokenize_options(res['rows'][0]['fdwoptions'])
|
||||
|
||||
@ -417,35 +453,22 @@ class ForeignDataWrapperView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
sql = self.get_sql(gid, sid, data, did, fid)
|
||||
|
||||
try:
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
sql, name = self.get_sql(gid, sid, data, did, fid)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Foreign Data Wrapper updated",
|
||||
data={
|
||||
'id': fid,
|
||||
'did': did,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': fid,
|
||||
'did': did,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
fid,
|
||||
did,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@ -475,6 +498,7 @@ class ForeignDataWrapperView(PGChildNodeView):
|
||||
|
||||
if name is None:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
@ -521,12 +545,18 @@ class ForeignDataWrapperView(PGChildNodeView):
|
||||
data[k] = json.loads(v, encoding='utf-8')
|
||||
except ValueError:
|
||||
data[k] = v
|
||||
try:
|
||||
sql, name = self.get_sql(gid, sid, data, did, fid)
|
||||
|
||||
sql = self.get_sql(gid, sid, data, did, fid)
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
return make_json_response(data=sql, status=200)
|
||||
else:
|
||||
return make_json_response(data='-- Modified SQL --', status=200)
|
||||
if sql == '':
|
||||
sql = "--modified SQL"
|
||||
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
status=200
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, data, did, fid=None):
|
||||
"""
|
||||
@ -542,89 +572,94 @@ class ForeignDataWrapperView(PGChildNodeView):
|
||||
required_args = [
|
||||
'name'
|
||||
]
|
||||
try:
|
||||
if fid is not None:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fid=fid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if res['rows'][0]['fdwoptions'] is not None:
|
||||
res['rows'][0]['fdwoptions'] = self.tokenize_options(res['rows'][0]['fdwoptions'])
|
||||
if fid is not None:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fid=fid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for key in ['fdwacl']:
|
||||
if key in data and data[key] is not None:
|
||||
if 'added' in data[key]:
|
||||
data[key]['added'] = parse_priv_to_db(data[key]['added'], ['U'])
|
||||
if 'changed' in data[key]:
|
||||
data[key]['changed'] = parse_priv_to_db(data[key]['changed'], ['U'])
|
||||
if 'deleted' in data[key]:
|
||||
data[key]['deleted'] = parse_priv_to_db(data[key]['deleted'], ['U'])
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the foreign data wrapper information.")
|
||||
)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
if res['rows'][0]['fdwoptions'] is not None:
|
||||
res['rows'][0]['fdwoptions'] = self.tokenize_options(res['rows'][0]['fdwoptions'])
|
||||
|
||||
new_list_add = []
|
||||
new_list_change = []
|
||||
for key in ['fdwacl']:
|
||||
if key in data and data[key] is not None:
|
||||
if 'added' in data[key]:
|
||||
data[key]['added'] = parse_priv_to_db(data[key]['added'], ['U'])
|
||||
if 'changed' in data[key]:
|
||||
data[key]['changed'] = parse_priv_to_db(data[key]['changed'], ['U'])
|
||||
if 'deleted' in data[key]:
|
||||
data[key]['deleted'] = parse_priv_to_db(data[key]['deleted'], ['U'])
|
||||
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'fdwoptions' in data and 'added' in data['fdwoptions']:
|
||||
for item in data['fdwoptions']['added']:
|
||||
new_dict_add = {}
|
||||
if item['fdwoption']:
|
||||
if 'fdwvalue' in item and item['fdwvalue'] and item['fdwvalue'] != '':
|
||||
new_dict_add.update(item);
|
||||
else:
|
||||
new_dict_add.update({'fdwoption': item['fdwoption'], 'fdwvalue': ''})
|
||||
old_data = res['rows'][0]
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
new_list_add.append(new_dict_add)
|
||||
new_list_add = []
|
||||
new_list_change = []
|
||||
|
||||
data['fdwoptions']['added'] = new_list_add
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'fdwoptions' in data and 'added' in data['fdwoptions']:
|
||||
for item in data['fdwoptions']['added']:
|
||||
new_dict_add = {}
|
||||
if item['fdwoption']:
|
||||
if 'fdwvalue' in item and item['fdwvalue'] and item['fdwvalue'] != '':
|
||||
new_dict_add.update(item);
|
||||
else:
|
||||
new_dict_add.update({'fdwoption': item['fdwoption'], 'fdwvalue': ''})
|
||||
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'fdwoptions' in data and 'changed' in data['fdwoptions']:
|
||||
for item in data['fdwoptions']['changed']:
|
||||
new_dict_change = {}
|
||||
if item['fdwoption']:
|
||||
if 'fdwvalue' in item and item['fdwvalue'] and item['fdwvalue'] != '':
|
||||
new_dict_change.update(item);
|
||||
else:
|
||||
new_dict_change.update({'fdwoption': item['fdwoption'], 'fdwvalue': ''})
|
||||
new_list_add.append(new_dict_add)
|
||||
|
||||
new_list_change.append(new_dict_change)
|
||||
data['fdwoptions']['added'] = new_list_add
|
||||
|
||||
data['fdwoptions']['changed'] = new_list_change
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'fdwoptions' in data and 'changed' in data['fdwoptions']:
|
||||
for item in data['fdwoptions']['changed']:
|
||||
new_dict_change = {}
|
||||
if item['fdwoption']:
|
||||
if 'fdwvalue' in item and item['fdwvalue'] and item['fdwvalue'] != '':
|
||||
new_dict_change.update(item);
|
||||
else:
|
||||
new_dict_change.update({'fdwoption': item['fdwoption'], 'fdwvalue': ''})
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']), data=data, o_data=old_data,
|
||||
conn=self.conn)
|
||||
else:
|
||||
for key in ['fdwacl']:
|
||||
if key in data and data[key] is not None:
|
||||
data[key] = parse_priv_to_db(data[key], ['U'])
|
||||
new_list_change.append(new_dict_change)
|
||||
|
||||
new_list = []
|
||||
data['fdwoptions']['changed'] = new_list_change
|
||||
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'fdwoptions' in data:
|
||||
for item in data['fdwoptions']:
|
||||
new_dict = {}
|
||||
if item['fdwoption']:
|
||||
if 'fdwvalue' in item and item['fdwvalue'] and item['fdwvalue'] != '':
|
||||
new_dict.update(item);
|
||||
else:
|
||||
new_dict.update({'fdwoption': item['fdwoption'], 'fdwvalue': ''})
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']), data=data, o_data=old_data,
|
||||
conn=self.conn)
|
||||
return sql, data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
for key in ['fdwacl']:
|
||||
if key in data and data[key] is not None:
|
||||
data[key] = parse_priv_to_db(data[key], ['U'])
|
||||
|
||||
new_list.append(new_dict)
|
||||
new_list = []
|
||||
|
||||
data['fdwoptions'] = new_list
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'fdwoptions' in data:
|
||||
for item in data['fdwoptions']:
|
||||
new_dict = {}
|
||||
if item['fdwoption']:
|
||||
if 'fdwvalue' in item and item['fdwvalue'] and item['fdwvalue'] != '':
|
||||
new_dict.update(item);
|
||||
else:
|
||||
new_dict.update({'fdwoption': item['fdwoption'], 'fdwvalue': ''})
|
||||
|
||||
new_list.append(new_dict)
|
||||
|
||||
data['fdwoptions'] = new_list
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']), data=data, conn=self.conn)
|
||||
sql += "\n"
|
||||
return sql, data['name']
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']), data=data, conn=self.conn)
|
||||
sql += "\n"
|
||||
return sql
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, fid):
|
||||
|
@ -22,6 +22,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@ -270,6 +271,41 @@ class ForeignServerView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, fid, fsid):
|
||||
"""
|
||||
This function will fetch properites foreign server node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
fid: Foreign data wrapper ID
|
||||
fsid: Foreign server ID
|
||||
"""
|
||||
|
||||
res = []
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
fsid=fsid, conn=self.conn)
|
||||
status, r_set = self.conn.execute_2darray(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=r_set)
|
||||
|
||||
for row in r_set['rows']:
|
||||
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['fsrvid'],
|
||||
fid,
|
||||
row['name'],
|
||||
icon="icon-foreign_server"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified foreign server."))
|
||||
|
||||
def tokenizeOptions(self, option_value):
|
||||
"""
|
||||
This function will tokenize the string stored in database
|
||||
@ -308,6 +344,11 @@ class ForeignServerView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the foreign server information.")
|
||||
)
|
||||
|
||||
if res['rows'][0]['fsrvoptions'] is not None:
|
||||
res['rows'][0]['fsrvoptions'] = self.tokenizeOptions(res['rows'][0]['fsrvoptions'])
|
||||
|
||||
@ -422,36 +463,22 @@ class ForeignServerView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
sql = self.get_sql(gid, sid, data, did, fid, fsid)
|
||||
try:
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Foreign server updated",
|
||||
data={
|
||||
'id': fsid,
|
||||
'fid': fid,
|
||||
'did': did,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': fsid,
|
||||
'fid': fid,
|
||||
'did': did,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
try:
|
||||
sql, name = self.get_sql(gid, sid, data, did, fid, fsid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
fsid,
|
||||
fid,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
@ -484,6 +511,7 @@ class ForeignServerView(PGChildNodeView):
|
||||
|
||||
if name is None:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
@ -534,18 +562,18 @@ class ForeignServerView(PGChildNodeView):
|
||||
data[k] = json.loads(v, encoding='utf-8')
|
||||
except ValueError:
|
||||
data[k] = v
|
||||
try:
|
||||
sql, name = self.get_sql(gid, sid, data, did, fid, fsid)
|
||||
if sql == '':
|
||||
sql = "--modified SQL"
|
||||
|
||||
sql = self.get_sql(gid, sid, data, did, fid, fsid)
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
data='-- Modified SQL --',
|
||||
status=200
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, data, did, fid, fsid=None):
|
||||
"""
|
||||
@ -563,96 +591,96 @@ class ForeignServerView(PGChildNodeView):
|
||||
required_args = [
|
||||
'name'
|
||||
]
|
||||
try:
|
||||
if fsid is not None:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fsid=fsid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if res['rows'][0]['fsrvoptions'] is not None:
|
||||
res['rows'][0]['fsrvoptions'] = self.tokenizeOptions(res['rows'][0]['fsrvoptions'])
|
||||
if fsid is not None:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fsid=fsid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for key in ['fsrvacl']:
|
||||
if key in data and data[key] is not None:
|
||||
if 'added' in data[key]:
|
||||
data[key]['added'] = parse_priv_to_db(data[key]['added'], ['U'])
|
||||
if 'changed' in data[key]:
|
||||
data[key]['changed'] = parse_priv_to_db(data[key]['changed'], ['U'])
|
||||
if 'deleted' in data[key]:
|
||||
data[key]['deleted'] = parse_priv_to_db(data[key]['deleted'], ['U'])
|
||||
if res['rows'][0]['fsrvoptions'] is not None:
|
||||
res['rows'][0]['fsrvoptions'] = self.tokenizeOptions(res['rows'][0]['fsrvoptions'])
|
||||
|
||||
old_data = res['rows'][0]
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
for key in ['fsrvacl']:
|
||||
if key in data and data[key] is not None:
|
||||
if 'added' in data[key]:
|
||||
data[key]['added'] = parse_priv_to_db(data[key]['added'], ['U'])
|
||||
if 'changed' in data[key]:
|
||||
data[key]['changed'] = parse_priv_to_db(data[key]['changed'], ['U'])
|
||||
if 'deleted' in data[key]:
|
||||
data[key]['deleted'] = parse_priv_to_db(data[key]['deleted'], ['U'])
|
||||
|
||||
new_list_add = []
|
||||
new_list_change = []
|
||||
old_data = res['rows'][0]
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
# Allow user to set the blank value in fsrvvalue field in option model
|
||||
if 'fsrvoptions' in data and 'added' in data['fsrvoptions']:
|
||||
for item in data['fsrvoptions']['added']:
|
||||
new_dict_add = {}
|
||||
if item['fsrvoption']:
|
||||
if 'fsrvvalue' in item and item['fsrvvalue'] and item['fsrvvalue'] != '':
|
||||
new_dict_add.update(item);
|
||||
else:
|
||||
new_dict_add.update({'fsrvoption': item['fsrvoption'], 'fsrvvalue': ''})
|
||||
new_list_add = []
|
||||
new_list_change = []
|
||||
|
||||
new_list_add.append(new_dict_add)
|
||||
# Allow user to set the blank value in fsrvvalue field in option model
|
||||
if 'fsrvoptions' in data and 'added' in data['fsrvoptions']:
|
||||
for item in data['fsrvoptions']['added']:
|
||||
new_dict_add = {}
|
||||
if item['fsrvoption']:
|
||||
if 'fsrvvalue' in item and item['fsrvvalue'] and item['fsrvvalue'] != '':
|
||||
new_dict_add.update(item);
|
||||
else:
|
||||
new_dict_add.update({'fsrvoption': item['fsrvoption'], 'fsrvvalue': ''})
|
||||
|
||||
data['fsrvoptions']['added'] = new_list_add
|
||||
new_list_add.append(new_dict_add)
|
||||
|
||||
# Allow user to set the blank value in fsrvvalue field in option model
|
||||
if 'fsrvoptions' in data and 'changed' in data['fsrvoptions']:
|
||||
for item in data['fsrvoptions']['changed']:
|
||||
new_dict_change = {}
|
||||
if item['fsrvoption']:
|
||||
if 'fsrvvalue' in item and item['fsrvvalue'] and item['fsrvvalue'] != '':
|
||||
new_dict_change.update(item);
|
||||
else:
|
||||
new_dict_change.update({'fsrvoption': item['fsrvoption'], 'fsrvvalue': ''})
|
||||
data['fsrvoptions']['added'] = new_list_add
|
||||
|
||||
new_list_change.append(new_dict_change)
|
||||
# Allow user to set the blank value in fsrvvalue field in option model
|
||||
if 'fsrvoptions' in data and 'changed' in data['fsrvoptions']:
|
||||
for item in data['fsrvoptions']['changed']:
|
||||
new_dict_change = {}
|
||||
if item['fsrvoption']:
|
||||
if 'fsrvvalue' in item and item['fsrvvalue'] and item['fsrvvalue'] != '':
|
||||
new_dict_change.update(item);
|
||||
else:
|
||||
new_dict_change.update({'fsrvoption': item['fsrvoption'], 'fsrvvalue': ''})
|
||||
|
||||
data['fsrvoptions']['changed'] = new_list_change
|
||||
new_list_change.append(new_dict_change)
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']), data=data, o_data=old_data,
|
||||
conn=self.conn)
|
||||
else:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fdwid=fid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
data['fsrvoptions']['changed'] = new_list_change
|
||||
|
||||
fdw_data = res['rows'][0]
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']), data=data, o_data=old_data,
|
||||
conn=self.conn)
|
||||
return sql, data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fdwid=fid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for key in ['fsrvacl']:
|
||||
if key in data and data[key] is not None:
|
||||
data[key] = parse_priv_to_db(data[key], ['U'])
|
||||
fdw_data = res['rows'][0]
|
||||
|
||||
new_list = []
|
||||
for key in ['fsrvacl']:
|
||||
if key in data and data[key] is not None:
|
||||
data[key] = parse_priv_to_db(data[key], ['U'])
|
||||
|
||||
if 'fsrvoptions' in data:
|
||||
for item in data['fsrvoptions']:
|
||||
new_dict = {}
|
||||
if item['fsrvoption']:
|
||||
if 'fsrvvalue' in item and item['fsrvvalue'] and item['fsrvvalue'] != '':
|
||||
new_dict.update(item);
|
||||
else:
|
||||
new_dict.update({'fsrvoption': item['fsrvoption'], 'fsrvvalue': ''})
|
||||
new_list = []
|
||||
|
||||
new_list.append(new_dict)
|
||||
if 'fsrvoptions' in data:
|
||||
for item in data['fsrvoptions']:
|
||||
new_dict = {}
|
||||
if item['fsrvoption']:
|
||||
if 'fsrvvalue' in item and item['fsrvvalue'] and item['fsrvvalue'] != '':
|
||||
new_dict.update(item);
|
||||
else:
|
||||
new_dict.update({'fsrvoption': item['fsrvoption'], 'fsrvvalue': ''})
|
||||
|
||||
data['fsrvoptions'] = new_list
|
||||
new_list.append(new_dict)
|
||||
|
||||
data['fsrvoptions'] = new_list
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']), data=data, fdwdata=fdw_data,
|
||||
conn=self.conn)
|
||||
sql += "\n"
|
||||
return sql, data['name']
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']), data=data, fdwdata=fdw_data,
|
||||
conn=self.conn)
|
||||
sql += "\n"
|
||||
return sql
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, fid, fsid):
|
||||
|
@ -20,6 +20,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@ -284,6 +285,39 @@ class UserMappingView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, fid, fsid, umid):
|
||||
"""
|
||||
This function will fetch properties of user mapping node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
fid: Foreign data wrapper ID
|
||||
fsid: Foreign server ID
|
||||
umid: User mapping ID
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
conn=self.conn, umid=umid)
|
||||
status, r_set = self.conn.execute_2darray(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=r_set)
|
||||
|
||||
for row in r_set['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['um_oid'],
|
||||
fsid,
|
||||
row['name'],
|
||||
icon="icon-user_mapping"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified user mapping."))
|
||||
|
||||
def tokenizeOptions(self, option_value):
|
||||
"""
|
||||
This function will tokenize the string stored in database
|
||||
@ -323,6 +357,11 @@ class UserMappingView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the user mapping information.")
|
||||
)
|
||||
|
||||
if res['rows'][0]['umoptions'] is not None:
|
||||
res['rows'][0]['umoptions'] = self.tokenizeOptions(res['rows'][0]['umoptions'])
|
||||
|
||||
@ -427,38 +466,21 @@ class UserMappingView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
sql = self.get_sql(gid, sid, data, did, fid, fsid, umid)
|
||||
try:
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
sql, name = self.get_sql(gid, sid, data, did, fid, fsid, umid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="User Mapping updated",
|
||||
data={
|
||||
'id': umid,
|
||||
'fsid': fsid,
|
||||
'fid': fid,
|
||||
'did': did,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': umid,
|
||||
'fsid': fsid,
|
||||
'fid': fid,
|
||||
'did': did,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
umid,
|
||||
fsid,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
@ -492,6 +514,7 @@ class UserMappingView(PGChildNodeView):
|
||||
|
||||
if name is None:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
@ -508,6 +531,7 @@ class UserMappingView(PGChildNodeView):
|
||||
|
||||
if not res['rows']:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'The specified user mapping could not be found.\n'
|
||||
@ -559,18 +583,17 @@ class UserMappingView(PGChildNodeView):
|
||||
data[k] = json.loads(v, encoding='utf-8')
|
||||
except ValueError:
|
||||
data[k] = v
|
||||
try:
|
||||
sql, name = self.get_sql(gid, sid, data, did, fid, fsid, umid)
|
||||
if sql == '':
|
||||
sql = "--modified SQL"
|
||||
|
||||
sql = self.get_sql(gid, sid, data, did, fid, fsid, umid)
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
data='-- Modified SQL --',
|
||||
status=200
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, data, did, fid, fsid, umid=None):
|
||||
"""
|
||||
@ -589,93 +612,92 @@ class UserMappingView(PGChildNodeView):
|
||||
required_args = [
|
||||
'name'
|
||||
]
|
||||
try:
|
||||
if umid is not None:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), umid=umid, conn=self.conn)
|
||||
|
||||
if umid is not None:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), umid=umid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if res['rows'][0]['umoptions'] is not None:
|
||||
res['rows'][0]['umoptions'] = self.tokenizeOptions(res['rows'][0]['umoptions'])
|
||||
|
||||
old_data = res['rows'][0]
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fserid=fsid, conn=self.conn)
|
||||
status, res1 = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res1)
|
||||
|
||||
fdw_data = res1['rows'][0]
|
||||
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
new_list_add = []
|
||||
new_list_change = []
|
||||
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'umoptions' in data and 'added' in data['umoptions']:
|
||||
for item in data['umoptions']['added']:
|
||||
new_dict_add = {}
|
||||
if item['umoption']:
|
||||
if 'umvalue' in item and item['umvalue'] and item['umvalue'] != '':
|
||||
new_dict_add.update(item);
|
||||
else:
|
||||
new_dict_add.update({'umoption': item['umoption'], 'umvalue': ''})
|
||||
|
||||
new_list_add.append(new_dict_add)
|
||||
|
||||
data['umoptions']['added'] = new_list_add
|
||||
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'umoptions' in data and 'changed' in data['umoptions']:
|
||||
for item in data['umoptions']['changed']:
|
||||
new_dict_change = {}
|
||||
if item['umoption']:
|
||||
if 'umvalue' in item and item['umvalue'] and item['umvalue'] != '':
|
||||
new_dict_change.update(item);
|
||||
else:
|
||||
new_dict_change.update({'umoption': item['umoption'], 'umvalue': ''})
|
||||
|
||||
new_list_change.append(new_dict_change)
|
||||
|
||||
data['umoptions']['changed'] = new_list_change
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']), data=data, o_data=old_data,
|
||||
fdwdata=fdw_data, conn=self.conn)
|
||||
return sql, data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fserid=fsid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
fdw_data = res['rows'][0]
|
||||
|
||||
if res['rows'][0]['umoptions'] is not None:
|
||||
res['rows'][0]['umoptions'] = self.tokenizeOptions(res['rows'][0]['umoptions'])
|
||||
new_list = []
|
||||
|
||||
old_data = res['rows'][0]
|
||||
if 'umoptions' in data:
|
||||
for item in data['umoptions']:
|
||||
new_dict = {}
|
||||
if item['umoption']:
|
||||
if 'umvalue' in item and item['umvalue'] \
|
||||
and item['umvalue'] != '':
|
||||
new_dict.update(item);
|
||||
else:
|
||||
new_dict.update(
|
||||
{'umoption': item['umoption'],
|
||||
'umvalue': ''}
|
||||
)
|
||||
new_list.append(new_dict)
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fserid=fsid, conn=self.conn)
|
||||
status, res1 = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res1)
|
||||
data['umoptions'] = new_list
|
||||
|
||||
fdw_data = res1['rows'][0]
|
||||
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
new_list_add = []
|
||||
new_list_change = []
|
||||
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'umoptions' in data and 'added' in data['umoptions']:
|
||||
for item in data['umoptions']['added']:
|
||||
new_dict_add = {}
|
||||
if item['umoption']:
|
||||
if 'umvalue' in item and item['umvalue'] and item['umvalue'] != '':
|
||||
new_dict_add.update(item);
|
||||
else:
|
||||
new_dict_add.update({'umoption': item['umoption'], 'umvalue': ''})
|
||||
|
||||
new_list_add.append(new_dict_add)
|
||||
|
||||
data['umoptions']['added'] = new_list_add
|
||||
|
||||
# Allow user to set the blank value in fdwvalue field in option model
|
||||
if 'umoptions' in data and 'changed' in data['umoptions']:
|
||||
for item in data['umoptions']['changed']:
|
||||
new_dict_change = {}
|
||||
if item['umoption']:
|
||||
if 'umvalue' in item and item['umvalue'] and item['umvalue'] != '':
|
||||
new_dict_change.update(item);
|
||||
else:
|
||||
new_dict_change.update({'umoption': item['umoption'], 'umvalue': ''})
|
||||
|
||||
new_list_change.append(new_dict_change)
|
||||
|
||||
data['umoptions']['changed'] = new_list_change
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']), data=data, o_data=old_data,
|
||||
fdwdata=fdw_data, conn=self.conn)
|
||||
else:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), fserid=fsid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
fdw_data = res['rows'][0]
|
||||
|
||||
new_list = []
|
||||
|
||||
if 'umoptions' in data:
|
||||
for item in data['umoptions']:
|
||||
new_dict = {}
|
||||
if item['umoption']:
|
||||
if 'umvalue' in item and item['umvalue'] \
|
||||
and item['umvalue'] != '':
|
||||
new_dict.update(item);
|
||||
else:
|
||||
new_dict.update(
|
||||
{'umoption': item['umoption'],
|
||||
'umvalue': ''}
|
||||
)
|
||||
new_list.append(new_dict)
|
||||
|
||||
data['umoptions'] = new_list
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']), data=data, fdwdata=fdw_data,
|
||||
conn=self.conn)
|
||||
sql += "\n"
|
||||
return sql
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']), data=data, fdwdata=fdw_data,
|
||||
conn=self.conn)
|
||||
sql += "\n"
|
||||
return sql, data['name']
|
||||
|
||||
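The repeated loops above (for 'umoptions' and for its 'added' and 'changed' sub-lists) all perform the same normalisation: keep every entry that has an option name, and default a missing or empty value to an empty string. A minimal standalone sketch of that idea, assuming nothing about pgAdmin's own helpers (the function name is illustrative):

def normalize_um_options(options):
    # Keep entries that have an option name; default a missing or blank
    # value to '' so the generated SQL always gets an explicit value.
    normalized = []
    for item in options:
        if not item.get('umoption'):
            continue
        value = item.get('umvalue') or ''
        normalized.append({'umoption': item['umoption'], 'umvalue': value})
    return normalized

# normalize_um_options([{'umoption': 'user'},
#                       {'umoption': 'password', 'umvalue': 'secret'}])
# -> [{'umoption': 'user', 'umvalue': ''},
#     {'umoption': 'password', 'umvalue': 'secret'}]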
@check_precondition
|
||||
def sql(self, gid, sid, did, fid, fsid, umid):
|
||||
|
@ -22,6 +22,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@ -289,6 +290,36 @@ class LanguageView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, lid):
|
||||
"""
|
||||
This function will fetch the properties of the language node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
lid: Language ID
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
lid=lid)
|
||||
status, result = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=result)
|
||||
|
||||
for row in result['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
row['name'],
|
||||
icon="icon-language"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified language."))
|
||||
|
||||
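Each new node() method returns the browser-node payload for a single object, so the client can refresh or move just that tree item instead of reloading the whole collection. The exact keys are produced by generate_browser_node(); the dict below is only an illustration of the kind of payload involved, with assumed field names rather than a documented format:

example_language_node = {
    "id": "language/17354",   # node type + object id (assumed format)
    "_id": 17354,             # the language oid (lid)
    "_pid": 12345,            # parent id, here the database (did)
    "label": "plpgsql",
    "icon": "icon-language",
}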
@check_precondition
|
||||
def properties(self, gid, sid, did, lid):
|
||||
"""
|
||||
@ -306,6 +337,11 @@ class LanguageView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the language information.")
|
||||
)
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'acl.sql']), lid=lid)
|
||||
status, result = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
@ -356,36 +392,22 @@ class LanguageView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
sql = self.get_sql(data, lid)
|
||||
|
||||
try:
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
sql, name = self.get_sql(data, lid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Language updated",
|
||||
data={
|
||||
'id': lid,
|
||||
'did': did,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
lid,
|
||||
did,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': lid,
|
||||
'did': did,
|
||||
'sid': sid,
|
||||
'gid': gid
|
||||
}
|
||||
)
|
||||
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@ -510,18 +532,17 @@ class LanguageView(PGChildNodeView):
|
||||
data[k] = json.loads(v, encoding='utf-8')
|
||||
except ValueError:
|
||||
data[k] = v
|
||||
try:
|
||||
sql, name = self.get_sql(data, lid)
|
||||
if sql == '':
|
||||
sql = "--modified SQL"
|
||||
|
||||
sql = self.get_sql(data, lid)
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
data='-- Modified SQL --',
|
||||
status=200
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
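The msql handler above follows the convention used throughout this commit: return the generated DDL when there is something to change, otherwise a placeholder comment so the SQL pane never renders an empty response. A hedged sketch of that contract (the helper name is made up):

def modified_sql(sql):
    # Empty or whitespace-only DDL becomes a harmless placeholder.
    sql = (sql or '').strip()
    return sql if sql else '-- Modified SQL --'

# modified_sql('')                   -> '-- Modified SQL --'
# modified_sql('ALTER LANGUAGE ...') -> 'ALTER LANGUAGE ...'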
def get_sql(self, data, lid=None):
|
||||
"""
|
||||
@ -534,39 +555,43 @@ class LanguageView(PGChildNodeView):
|
||||
required_args = [
|
||||
'name', 'lanowner', 'description'
|
||||
]
|
||||
try:
|
||||
sql = ''
|
||||
if lid is not None:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), lid=lid)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
for key in ['lanacl']:
|
||||
if key in data and data[key] is not None:
|
||||
if 'added' in data[key]:
|
||||
data[key]['added'] = parse_priv_to_db(data[key]['added'])
|
||||
if 'changed' in data[key]:
|
||||
data[key]['changed'] = parse_priv_to_db(data[key]['changed'])
|
||||
if 'deleted' in data[key]:
|
||||
data[key]['deleted'] = parse_priv_to_db(data[key]['deleted'])
|
||||
if lid is not None:
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']), lid=lid)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']), data=data,
|
||||
o_data=old_data, conn=self.conn)
|
||||
else:
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("Couldnot find the language information.")
|
||||
)
|
||||
|
||||
if 'lanacl' in data:
|
||||
data['lanacl'] = parse_priv_to_db(data['lanacl'], 'LANGUAGE')
|
||||
for key in ['lanacl']:
|
||||
if key in data and data[key] is not None:
|
||||
if 'added' in data[key]:
|
||||
data[key]['added'] = parse_priv_to_db(data[key]['added'], ["U"])
|
||||
if 'changed' in data[key]:
|
||||
data[key]['changed'] = parse_priv_to_db(data[key]['changed'], ["U"])
|
||||
if 'deleted' in data[key]:
|
||||
data[key]['deleted'] = parse_priv_to_db(data[key]['deleted'], ["U"])
|
||||
|
||||
old_data = res['rows'][0]
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']), data=data,
|
||||
o_data=old_data, conn=self.conn)
|
||||
return sql.strip('\n'), data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
|
||||
if 'lanacl' in data:
|
||||
data['lanacl'] = parse_priv_to_db(data['lanacl'], ["U"])
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=data, conn=self.conn)
|
||||
return sql.strip('\n'), data['name']
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=data, conn=self.conn)
|
||||
return sql.strip('\n')
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
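Languages only support the USAGE privilege, which is why every parse_priv_to_db() call above passes ["U"]. The dict below is only an assumed illustration of the client-side ACL entry that gets converted; it is not taken from the pgAdmin source:

language_acl_entry = {
    'grantee': 'report_role',
    'grantor': 'postgres',
    'privileges': [
        {'privilege_type': 'U', 'privilege': True, 'with_grant': False}
    ],
}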
@check_precondition
|
||||
def get_functions(self, gid, sid, did):
|
||||
|
@ -126,6 +126,9 @@ def check_precondition(f):
|
||||
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
|
||||
kwargs['sid']
|
||||
)
|
||||
if not self.manager:
|
||||
return gone(errormsg="Couldn't find the server.")
|
||||
|
||||
self.conn = self.manager.connection(did=kwargs['did'])
|
||||
# Set the template path for the SQL scripts
|
||||
self.template_path = self.template_initial + '/' + (
|
||||
@ -431,6 +434,53 @@ It may have been removed by another user.
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, scid):
|
||||
"""
|
||||
This function will fetch the properties of the schema node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
scid: Schema ID
|
||||
|
||||
Returns:
|
||||
JSON of given schema child node
|
||||
"""
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'sql/nodes.sql']),
|
||||
show_sysobj=self.blueprint.show_system_objects,
|
||||
_=gettext,
|
||||
scid=scid
|
||||
)
|
||||
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
if scid is not None:
|
||||
if len(rset['rows']) == 0:
|
||||
return gone(gettext("""
|
||||
Could not find the schema in the database.
|
||||
It may have been removed by another user.
|
||||
"""))
|
||||
|
||||
icon = 'icon-{0}'.format(self.node_type)
|
||||
|
||||
for row in rset['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
row['name'],
|
||||
icon=icon,
|
||||
can_create=row['can_create'],
|
||||
has_usage=row['has_usage']
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
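As with the other node() methods, the schema variant returns a single browser node, but it also carries the can_create and has_usage flags read from nodes.sql so the client knows which child menus to enable. An illustrative payload with assumed key names:

example_schema_node = {
    "_id": 2200,          # schema oid (scid)
    "_pid": 12345,        # parent database id (did)
    "label": "public",
    "icon": "icon-schema",
    "can_create": True,   # user may create objects in this schema
    "has_usage": True,    # user has USAGE on this schema
}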
@check_precondition
|
||||
def properties(self, gid, sid, did, scid):
|
||||
"""
|
||||
@ -458,10 +508,9 @@ It may have been removed by another user.
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""
|
||||
Could not find the schema in the database.
|
||||
It may have been removed by another user.
|
||||
"""))
|
||||
return gone(
|
||||
gettext("Could not find the schema in the database. It may have been removed by another user."
|
||||
))
|
||||
|
||||
# Making copy of output for future use
|
||||
copy_data = dict(res['rows'][0])
|
||||
@ -555,34 +604,21 @@ It may have been removed by another user.
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
try:
|
||||
SQL = self.get_sql(gid, sid, data, scid)
|
||||
SQL, name = self.get_sql(gid, sid, data, scid)
|
||||
|
||||
if SQL and SQL.strip('\n') and SQL.strip(' '):
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Updated",
|
||||
data={
|
||||
'id': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
scid,
|
||||
did,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@ -612,6 +648,7 @@ It may have been removed by another user.
|
||||
|
||||
if name is None:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
@ -705,6 +742,7 @@ It may have been removed by another user.
|
||||
"/".join([self.template_path, 'sql/update.sql']),
|
||||
_=gettext, data=data, o_data=old_data, conn=self.conn
|
||||
)
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
required_args = ['name']
|
||||
|
||||
@ -720,7 +758,7 @@ It may have been removed by another user.
|
||||
data=data, conn=self.conn, _=gettext
|
||||
)
|
||||
|
||||
return SQL
|
||||
return SQL, data['name']
|
||||
|
||||
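The change above is the pattern applied to every get_sql() in this commit: instead of returning just the DDL, it returns a (SQL, name) tuple so the caller can rebuild the tree node label without another round trip. A toy, self-contained version of the convention (not the real method):

def get_sql(data, old_data):
    # The new name wins if the user renamed the object; otherwise keep the old one.
    name = data['name'] if 'name' in data else old_data['name']
    sql = "-- DDL for %s goes here" % name   # stands in for the rendered template
    return sql.strip('\n'), name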
@check_precondition
|
||||
def sql(self, gid, sid, did, scid):
|
||||
@ -744,10 +782,7 @@ It may have been removed by another user.
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""
|
||||
Could not find the schema in the database.
|
||||
It may have been removed by another user.
|
||||
"""))
|
||||
return gone(gettext("""Could not find the schema in the database. It may have been removed by another user."""))
|
||||
|
||||
data = res['rows'][0]
|
||||
data = self._formatter(data, scid)
|
||||
@ -961,6 +996,5 @@ It may have been removed by another user.
|
||||
|
||||
return ajax_response(response=SQL.strip("\n"))
|
||||
|
||||
|
||||
SchemaView.register_node_view(schema_blueprint)
|
||||
CatalogView.register_node_view(catalog_blueprint)
|
||||
|
@ -20,6 +20,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@ -221,6 +222,42 @@ class CatalogObjectView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, scid, coid):
|
||||
"""
|
||||
This function will fetch the properties of the catalog object node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
scid: Schema ID
|
||||
coid: Catalog object ID
|
||||
|
||||
Returns:
|
||||
JSON of given catalog objects child node
|
||||
"""
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'nodes.sql']), coid=coid
|
||||
)
|
||||
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
scid,
|
||||
row['name'],
|
||||
icon="icon-catalog_object"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(errormsg=gettext("Could not find the specified catalog object."))
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, coid):
|
||||
"""
|
||||
@ -247,6 +284,9 @@ class CatalogObjectView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the specified catalog object."""))
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
|
@ -19,6 +19,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
from pgadmin.utils.preferences import Preferences
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
@ -253,12 +254,15 @@ class CatalogObjectColumnsView(PGChildNodeView):
|
||||
JSON of selected column node
|
||||
"""
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']), coid=coid, clid=clid)
|
||||
'properties.sql']), coid=coid, clid=clid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the specified column."""))
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
|
@ -2,5 +2,9 @@ SELECT
|
||||
c.oid, c.relname as name
|
||||
FROM
|
||||
pg_class c
|
||||
{% if scid %}
|
||||
WHERE relnamespace = {{scid}}::int
|
||||
{% elif coid %}
|
||||
WHERE c.oid = {{coid}}::int
|
||||
{% endif %}
|
||||
ORDER BY relname;
|
||||
|
@ -1,6 +1,7 @@
|
||||
SELECT
|
||||
c.oid, c.relname as name
|
||||
FROM pg_class c
|
||||
{% if scid %}
|
||||
WHERE relnamespace = {{scid}}::int
|
||||
OR (
|
||||
-- On EnterpriseDB we need to ignore some objects in the catalog, namely, _*, dual and type_object_source.
|
||||
@ -8,4 +9,7 @@ OR (
|
||||
AND
|
||||
(c.relname NOT LIKE '\\_%' AND c.relname != 'dual' AND c.relname != 'type_object_source')
|
||||
)
|
||||
{% elif coid %}
|
||||
WHERE c.oid = {{coid}}::int
|
||||
{% endif %}
|
||||
ORDER BY relname;
|
||||
|
@ -21,6 +21,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@ -253,6 +254,41 @@ class CollationView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, scid, coid):
|
||||
"""
|
||||
This function will fetch properties of the collation node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
scid: Schema ID
|
||||
coid: Collation ID
|
||||
|
||||
Returns:
|
||||
JSON of given collation node
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), coid=coid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
scid,
|
||||
row['name'],
|
||||
icon="icon-collation"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified collation."))
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, coid):
|
||||
"""
|
||||
@ -270,22 +306,21 @@ class CollationView(PGChildNodeView):
|
||||
JSON of selected collation node
|
||||
"""
|
||||
|
||||
try:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
scid=scid, coid=coid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
scid=scid, coid=coid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the collation object in the database. It may have been removed by another user."""))
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def get_collation(self, gid, sid, did, scid, coid=None):
|
||||
@ -394,31 +429,28 @@ class CollationView(PGChildNodeView):
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=data, conn=self.conn)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=data, conn=self.conn)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# We need the oid to add the object to the browser tree
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_oid.sql']), data=data)
|
||||
status, coid = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=coid)
|
||||
# We need the oid to add the object to the browser tree
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_oid.sql']), data=data)
|
||||
status, coid = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=coid)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
coid,
|
||||
scid,
|
||||
data['name'],
|
||||
icon="icon-collation"
|
||||
)
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
coid,
|
||||
scid,
|
||||
data['name'],
|
||||
icon="icon-collation"
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, coid):
|
||||
@ -495,35 +527,31 @@ class CollationView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
SQL = self.get_sql(gid, sid, data, scid, coid)
|
||||
try:
|
||||
if SQL and SQL.strip('\n') and SQL.strip(' '):
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
SQL, name = self.get_sql(gid, sid, data, scid, coid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Collation updated",
|
||||
data={
|
||||
'id': coid,
|
||||
'scid': scid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': coid,
|
||||
'scid': scid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
# We need the oid to add the object to the browser tree
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_oid.sql']), coid=coid)
|
||||
|
||||
status, res = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
scid = res['rows'][0]['scid']
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
coid,
|
||||
scid,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
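The update handler re-reads scid via get_oid.sql before building the node because an update may include ALTER COLLATION ... SET SCHEMA, which changes the parent of the tree item. The string below restates what that template resolves (see the get_oid.sql hunk later in this diff); treat it as a paraphrase rather than the template's literal output:

refresh_scid_sql = (
    "SELECT c.collnamespace AS scid "
    "FROM pg_collation c "
    "WHERE c.oid = %(coid)s::oid"
)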
@check_precondition
|
||||
def msql(self, gid, sid, did, scid, coid=None):
|
||||
@ -545,11 +573,13 @@ class CollationView(PGChildNodeView):
|
||||
data[k] = v
|
||||
|
||||
try:
|
||||
SQL = self.get_sql(gid, sid, data, scid, coid)
|
||||
if SQL and SQL.strip('\n') and SQL.strip(' '):
|
||||
return make_json_response(
|
||||
data=SQL,
|
||||
status=200
|
||||
SQL, name = self.get_sql(gid, sid, data, scid, coid)
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
|
||||
return make_json_response(
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
@ -570,6 +600,7 @@ class CollationView(PGChildNodeView):
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=self.conn
|
||||
)
|
||||
return SQL.strip('\n'), data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
required_args = [
|
||||
'name'
|
||||
@ -585,7 +616,7 @@ class CollationView(PGChildNodeView):
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=data, conn=self.conn)
|
||||
return SQL.strip('\n')
|
||||
return SQL.strip('\n'), data['name']
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, coid):
|
||||
|
@ -1,8 +1,15 @@
|
||||
{# Below will provide oid for newly created collation #}
|
||||
{% if data %}
|
||||
{% if data is defined %}
|
||||
SELECT c.oid
|
||||
FROM pg_collation c, pg_namespace n
|
||||
WHERE c.collnamespace=n.oid AND
|
||||
n.nspname = {{ data.schema|qtLiteral }} AND
|
||||
c.collname = {{ data.name|qtLiteral }}
|
||||
{% endif %}
|
||||
{% elif coid %}
|
||||
SELECT
|
||||
c.collnamespace as scid
|
||||
FROM
|
||||
pg_collation c
|
||||
WHERE
|
||||
c.oid = {{coid}}::oid;
|
||||
{% endif %}
|
||||
|
@ -1,4 +1,8 @@
|
||||
SELECT c.oid, c.collname AS name
|
||||
FROM pg_collation c
|
||||
{% if scid %}
|
||||
WHERE c.collnamespace = {{scid}}::oid
|
||||
{% elif coid %}
|
||||
WHERE c.oid = {{coid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY c.collname;
|
||||
|
@ -328,6 +328,38 @@ class DomainView(PGChildNodeView, DataTypeReader):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, scid, doid):
|
||||
"""
|
||||
This function will fetch the properties of the domain node.
|
||||
|
||||
Args:
|
||||
gid: Server Group Id
|
||||
sid: Server Id
|
||||
did: Database Id
|
||||
scid: Schema Id
|
||||
doid: Domain Id
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([self.template_path, 'node.sql']),
|
||||
doid=doid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
scid,
|
||||
row['name'],
|
||||
icon="icon-domain"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified domain."))
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, doid):
|
||||
"""
|
||||
@ -498,36 +530,30 @@ AND relkind != 'c'))"""
|
||||
"""
|
||||
|
||||
data = self.request
|
||||
try:
|
||||
status, SQL = self.get_sql(gid, sid, data, scid)
|
||||
SQL, name = self.get_sql(gid, sid, data, scid)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=SQL)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
# We need the oid to add the object to the browser tree; the SQL
# below returns it
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_oid.sql']),
|
||||
basensp=data['basensp'],
|
||||
name=data['name'])
|
||||
status, doid = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# We need the oid to add the object to the browser tree; the SQL
# below returns it
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_oid.sql']),
|
||||
basensp=data['basensp'],
|
||||
name=data['name'])
|
||||
status, doid = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
doid,
|
||||
scid,
|
||||
data['name'],
|
||||
icon="icon-domain"
|
||||
)
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
doid,
|
||||
scid,
|
||||
data['name'],
|
||||
icon="icon-domain"
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, doid):
|
||||
@ -548,49 +574,46 @@ AND relkind != 'c'))"""
|
||||
else:
|
||||
cascade = False
|
||||
|
||||
try:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'delete.sql']),
|
||||
scid=scid, doid=doid)
|
||||
status, res = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if not res['rows']:
|
||||
return make_json_response(
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
),
|
||||
info=gettext(
|
||||
'The specified domain could not be found.\n'
|
||||
)
|
||||
)
|
||||
|
||||
name = res['rows'][0]['name']
|
||||
basensp = res['rows'][0]['basensp']
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'delete.sql']),
|
||||
name=name, basensp=basensp, cascade=cascade)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'delete.sql']),
|
||||
scid=scid, doid=doid)
|
||||
status, res = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if not res['rows']:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info=gettext("Domain dropped"),
|
||||
data={
|
||||
'id': doid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
),
|
||||
info=gettext(
|
||||
'The specified domain could not be found.\n'
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
name = res['rows'][0]['name']
|
||||
basensp = res['rows'][0]['basensp']
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'delete.sql']),
|
||||
name=name, basensp=basensp, cascade=cascade)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info=gettext("Domain dropped"),
|
||||
data={
|
||||
'id': doid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
|
||||
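Dropping a domain above is a two-step operation: the first delete.sql call resolves the domain's name and schema (basensp) from its oid, and the second renders the actual DROP statement. A standalone sketch of the second step, assuming quoted identifiers and an optional CASCADE (placeholders, not the template's exact output):

def drop_domain_sql(name, basensp, cascade=False):
    sql = 'DROP DOMAIN "%s"."%s"' % (basensp, name)
    if cascade:
        sql += ' CASCADE'
    return sql + ';'

# drop_domain_sql('positive_int', 'public', cascade=True)
# -> 'DROP DOMAIN "public"."positive_int" CASCADE;'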
@check_precondition
|
||||
@validate_request
|
||||
@ -606,53 +629,31 @@ AND relkind != 'c'))"""
|
||||
doid: Domain Id
|
||||
"""
|
||||
|
||||
status, SQL = self.get_sql(gid, sid, self.request, scid, doid)
|
||||
SQL, name = self.get_sql(gid, sid, self.request, scid, doid)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=SQL)
|
||||
if SQL:
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
try:
|
||||
if SQL:
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
# Get Schema Id
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_oid.sql']),
|
||||
doid=doid)
|
||||
status, res = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# Get Schema Id
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_oid.sql']),
|
||||
doid=doid)
|
||||
status, res = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
scid = res['rows'][0]['scid']
|
||||
|
||||
scid = res['rows'][0]['scid']
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Domain updated",
|
||||
data={
|
||||
'id': doid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
doid,
|
||||
scid,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': doid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, doid=None):
|
||||
@ -726,15 +727,17 @@ AND relkind != 'c'))"""
|
||||
SQL statements to create/update the Domain.
|
||||
"""
|
||||
|
||||
status, SQL = self.get_sql(gid, sid, self.request, scid, doid)
|
||||
try:
|
||||
SQL, name = self.get_sql(gid, sid, self.request, scid, doid)
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
|
||||
if SQL:
|
||||
return make_json_response(
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return SQL
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, data, scid, doid=None):
|
||||
"""
|
||||
@ -748,43 +751,40 @@ AND relkind != 'c'))"""
|
||||
doid: Domain Id
|
||||
"""
|
||||
|
||||
try:
|
||||
if doid is not None:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
scid=scid, doid=doid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if doid is not None:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
scid=scid, doid=doid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return False, internal_server_error(errormsg=res)
|
||||
if not status:
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
old_data = res['rows'][0]
|
||||
|
||||
# Get Domain Constraints
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_constraints.sql']),
|
||||
doid=doid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
# Get Domain Constraints
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_constraints.sql']),
|
||||
doid=doid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
con_data = {}
|
||||
for c in res['rows']:
|
||||
con_data[c['conoid']] = c
|
||||
con_data = {}
|
||||
for c in res['rows']:
|
||||
con_data[c['conoid']] = c
|
||||
|
||||
old_data['constraints'] = con_data
|
||||
old_data['constraints'] = con_data
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data)
|
||||
else:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=data)
|
||||
return True, SQL.strip('\n')
|
||||
|
||||
except Exception as e:
|
||||
return False, e
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data)
|
||||
return SQL.strip('\n'), data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=data)
|
||||
return SQL.strip('\n'), data['name']
|
||||
|
||||
@check_precondition
|
||||
def dependents(self, gid, sid, did, scid, doid):
|
||||
|
@ -21,6 +21,7 @@ from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@ -329,6 +330,47 @@ class DomainConstraintView(PGChildNodeView):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, scid, doid, coid):
|
||||
"""
|
||||
Returns all the Domain Constraints.
|
||||
|
||||
Args:
|
||||
gid: Server Group Id
|
||||
sid: Server Id
|
||||
did: Database Id
|
||||
scid: Schema Id
|
||||
doid: Domain Id
|
||||
coid: Domain Constraint Id
|
||||
"""
|
||||
res = []
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
coid=coid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
if 'convalidated' not in row:
|
||||
icon = 'icon-domain_constraints'
|
||||
elif row['convalidated']:
|
||||
icon = 'icon-domain_constraints'
|
||||
else:
|
||||
icon = 'icon-domain_constraints-bad'
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
doid,
|
||||
row['name'],
|
||||
icon=icon
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified domain constraint."))
|
||||
|
||||
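The icon choice above distinguishes validated from not-yet-validated check constraints; the '-bad' suffix appears to mark constraints created with NOT VALID. A minimal restatement of that rule:

def constraint_icon(row):
    # Missing 'convalidated' or a truthy value -> normal icon,
    # an explicit False -> the "-bad" variant.
    if row.get('convalidated', True):
        return 'icon-domain_constraints'
    return 'icon-domain_constraints-bad'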
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, doid, coid):
|
||||
"""
|
||||
@ -350,6 +392,12 @@ class DomainConstraintView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext(
|
||||
"Could not find the specified domain constraint."
|
||||
)
|
||||
)
|
||||
|
||||
data = res['rows'][0]
|
||||
return ajax_response(
|
||||
response=data,
|
||||
|
@ -7,8 +7,13 @@ JOIN
|
||||
pg_type t ON t.oid=contypid
|
||||
JOIN
|
||||
pg_namespace nl ON nl.oid=typnamespace
|
||||
{% if doid %}
|
||||
WHERE
|
||||
contype = 'c' AND contypid = {{doid}}::oid
|
||||
{% if coid %}
|
||||
AND c.oid = {{ coid }}
|
||||
{% endif %}
|
||||
{% elif coid %}
|
||||
WHERE
|
||||
c.oid = {{ coid }}
|
||||
{% endif %}
|
||||
|
@ -10,8 +10,13 @@ JOIN
|
||||
pg_namespace nl ON nl.oid=typnamespace
|
||||
LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=c.oid AND des.classoid='pg_constraint'::regclass)
|
||||
{% if doid %}
|
||||
WHERE
|
||||
contype = 'c' AND contypid = {{doid}}::oid
|
||||
{% if coid %}
|
||||
AND c.oid = {{ coid }}
|
||||
{% endif %}
|
||||
{% elif coid %}
|
||||
WHERE
|
||||
c.oid = {{ coid }}
|
||||
{% endif %}
|
||||
|
@ -7,7 +7,11 @@ JOIN
|
||||
pg_type b ON b.oid = d.typbasetype
|
||||
JOIN
|
||||
pg_namespace bn ON bn.oid=d.typnamespace
|
||||
{% if scid %}
|
||||
WHERE
|
||||
d.typnamespace = {{scid}}::oid
|
||||
{% elif doid %}
|
||||
WHERE d.oid = {{doid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
d.typname;
|
||||
|
@ -7,7 +7,11 @@ JOIN
|
||||
pg_type b ON b.oid = d.typbasetype
|
||||
JOIN
|
||||
pg_namespace bn ON bn.oid=d.typnamespace
|
||||
{% if scid is defined %}
|
||||
WHERE
|
||||
d.typnamespace = {{scid}}::oid
|
||||
{% elif doid %}
|
||||
WHERE d.oid = {{doid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
d.typname;
|
||||
|
@ -409,6 +409,41 @@ class ForeignTableView(PGChildNodeView, DataTypeReader):
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, scid, foid):
|
||||
"""
|
||||
Returns the Foreign Tables to generate the Nodes.
|
||||
|
||||
Args:
|
||||
gid: Server Group Id
|
||||
sid: Server Id
|
||||
did: Database Id
|
||||
scid: Schema Id
|
||||
foid: Foreign Table Id
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'node.sql']), foid=foid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
scid,
|
||||
row['name'],
|
||||
icon="icon-foreign-table"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext(
|
||||
'Could not find the specified foreign table.'
|
||||
))
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, foid):
|
||||
"""
|
||||
@ -634,10 +669,7 @@ shifted to the another schema.
|
||||
"""
|
||||
try:
|
||||
# Get SQL to create Foreign Table
|
||||
status, SQL = self.get_sql(gid, sid, did, scid, self.request)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=SQL)
|
||||
|
||||
SQL, name = self.get_sql(gid, sid, did, scid, self.request)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@ -744,50 +776,31 @@ shifted to the another schema.
|
||||
scid: Schema Id
|
||||
foid: Foreign Table Id
|
||||
"""
|
||||
status, SQL = self.get_sql(gid, sid, did, scid, self.request, foid)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=SQL)
|
||||
|
||||
try:
|
||||
if SQL and SQL.strip('\n') and SQL.strip(' '):
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
SQL, name = self.get_sql(gid, sid, did, scid, self.request, foid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_oid.sql']),
|
||||
foid=foid)
|
||||
status, res = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_oid.sql']),
|
||||
foid=foid)
|
||||
status, res = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
scid = res['rows'][0]['scid']
|
||||
scid = res['rows'][0]['scid']
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Foreign Table updated",
|
||||
data={
|
||||
'id': foid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
foid,
|
||||
scid,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': foid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@ -845,14 +858,17 @@ shifted to the another schema.
|
||||
Returns:
|
||||
SQL statements to create/update the Foreign Table.
|
||||
"""
|
||||
status, SQL = self.get_sql(gid, sid, did, scid, self.request, foid)
|
||||
if status:
|
||||
try:
|
||||
SQL, name = self.get_sql(gid, sid, did, scid, self.request, foid)
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
|
||||
return make_json_response(
|
||||
data=SQL.strip('\n'),
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return SQL
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, did, scid, data, foid=None):
|
||||
"""
|
||||
@ -865,90 +881,88 @@ shifted to the another schema.
|
||||
scid: Schema Id
|
||||
foid: Foreign Table Id
|
||||
"""
|
||||
try:
|
||||
if foid is not None:
|
||||
old_data = self._fetch_properties(gid, sid, did, scid, foid,
|
||||
inherits=True)
|
||||
if foid is not None:
|
||||
old_data = self._fetch_properties(gid, sid, did, scid, foid,
|
||||
inherits=True)
|
||||
|
||||
if not old_data:
|
||||
return gone(gettext("""
|
||||
Could not find the foreign table in the database.
|
||||
It may have been removed by another user or
|
||||
moved to another schema.
|
||||
"""))
|
||||
if not old_data:
|
||||
return gone(gettext("""
|
||||
Could not find the foreign table in the database.
|
||||
It may have been removed by another user or
|
||||
moved to another schema.
|
||||
"""))
|
||||
|
||||
# Prepare dict of columns with key = column's attnum
|
||||
# Will use this in the update template when any column is
|
||||
# changed, to identify the columns.
|
||||
col_data = {}
|
||||
for c in old_data['columns']:
|
||||
col_data[c['attnum']] = c
|
||||
# Prepare dict of columns with key = column's attnum
|
||||
# Will use this in the update template when any column is
|
||||
# changed, to identify the columns.
|
||||
col_data = {}
|
||||
for c in old_data['columns']:
|
||||
col_data[c['attnum']] = c
|
||||
|
||||
old_data['columns'] = col_data
|
||||
old_data['columns'] = col_data
|
||||
|
||||
if 'columns' in data and 'added' in data['columns']:
|
||||
data['columns']['added'] = self._format_columns(
|
||||
data['columns']['added'])
|
||||
if 'columns' in data and 'added' in data['columns']:
|
||||
data['columns']['added'] = self._format_columns(
|
||||
data['columns']['added'])
|
||||
|
||||
if 'columns' in data and 'changed' in data['columns']:
|
||||
data['columns']['changed'] = self._format_columns(
|
||||
data['columns']['changed'])
|
||||
if 'columns' in data and 'changed' in data['columns']:
|
||||
data['columns']['changed'] = self._format_columns(
|
||||
data['columns']['changed'])
|
||||
|
||||
# Parse Column Options
|
||||
for c in data['columns']['changed']:
|
||||
old_col_options = c['attfdwoptions'] if ('attfdwoptions' in c and c['attfdwoptions']) else []
|
||||
old_col_frmt_options = {}
|
||||
# Parse Column Options
|
||||
for c in data['columns']['changed']:
|
||||
old_col_options = c['attfdwoptions'] if ('attfdwoptions' in c and c['attfdwoptions']) else []
|
||||
old_col_frmt_options = {}
|
||||
|
||||
for o in old_col_options:
|
||||
col_opt = o.split("=")
|
||||
old_col_frmt_options[col_opt[0]] = col_opt[1]
|
||||
for o in old_col_options:
|
||||
col_opt = o.split("=")
|
||||
old_col_frmt_options[col_opt[0]] = col_opt[1]
|
||||
|
||||
c['coloptions_updated'] = {'added': [],
|
||||
'changed': [],
|
||||
'deleted': []}
|
||||
c['coloptions_updated'] = {'added': [],
|
||||
'changed': [],
|
||||
'deleted': []}
|
||||
|
||||
if 'coloptions' in c and len(c['coloptions']) > 0:
|
||||
for o in c['coloptions']:
|
||||
if o['option'] in old_col_frmt_options and o['value'] != old_col_frmt_options[
|
||||
o['option']]:
|
||||
c['coloptions_updated']['changed'].append(o)
|
||||
elif o['option'] not in old_col_frmt_options:
|
||||
c['coloptions_updated']['added'].append(o)
|
||||
if o['option'] in old_col_frmt_options:
|
||||
del old_col_frmt_options[o['option']]
|
||||
if 'coloptions' in c and len(c['coloptions']) > 0:
|
||||
for o in c['coloptions']:
|
||||
if o['option'] in old_col_frmt_options and \
|
||||
o['value'] != old_col_frmt_options[o['option']]:
|
||||
c['coloptions_updated']['changed'].append(o)
|
||||
elif o['option'] not in old_col_frmt_options:
|
||||
c['coloptions_updated']['added'].append(o)
|
||||
if o['option'] in old_col_frmt_options:
|
||||
del old_col_frmt_options[o['option']]
|
||||
|
||||
for o in old_col_frmt_options:
|
||||
c['coloptions_updated']['deleted'].append({'option': o})
|
||||
for o in old_col_frmt_options:
|
||||
c['coloptions_updated']['deleted'].append({'option': o})
|
||||
|
||||
# Parse Privileges
|
||||
if 'acl' in data and 'added' in data['acl']:
|
||||
data['acl']['added'] = parse_priv_to_db(data['acl']['added'],
|
||||
["a", "r", "w", "x"])
|
||||
if 'acl' in data and 'changed' in data['acl']:
|
||||
data['acl']['changed'] = parse_priv_to_db(
|
||||
data['acl']['changed'], ["a", "r", "w", "x"])
|
||||
if 'acl' in data and 'deleted' in data['acl']:
|
||||
data['acl']['deleted'] = parse_priv_to_db(
|
||||
data['acl']['deleted'], ["a", "r", "w", "x"])
|
||||
# Parse Privileges
|
||||
if 'acl' in data and 'added' in data['acl']:
|
||||
data['acl']['added'] = parse_priv_to_db(data['acl']['added'],
|
||||
["a", "r", "w", "x"])
|
||||
if 'acl' in data and 'changed' in data['acl']:
|
||||
data['acl']['changed'] = parse_priv_to_db(
|
||||
data['acl']['changed'], ["a", "r", "w", "x"])
|
||||
if 'acl' in data and 'deleted' in data['acl']:
|
||||
data['acl']['deleted'] = parse_priv_to_db(
|
||||
data['acl']['deleted'], ["a", "r", "w", "x"])
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data
|
||||
)
|
||||
else:
|
||||
data['columns'] = self._format_columns(data['columns'])
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data
|
||||
)
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
data['columns'] = self._format_columns(data['columns'])
|
||||
|
||||
# Parse Privileges
|
||||
if 'acl' in data:
|
||||
data['acl'] = parse_priv_to_db(data['acl'],
|
||||
["a", "r", "w", "x"])
|
||||
# Parse Privileges
|
||||
if 'acl' in data:
|
||||
data['acl'] = parse_priv_to_db(data['acl'],
|
||||
["a", "r", "w", "x"])
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'create.sql']), data=data)
|
||||
return True, SQL
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'create.sql']), data=data)
|
||||
return SQL, data['name']
|
||||
|
||||
except Exception as e:
|
||||
return False, e
|
||||
|
||||
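For each changed column, the code above diffs the old "key=value" option strings against the new list of option dicts to decide what must be added, changed, or dropped in the ALTER statement. A standalone sketch of that diff, independent of pgAdmin's data structures:

def diff_options(old_strings, new_options):
    # old_strings: ['delimiter=,', 'header=true'];  new_options: list of dicts
    old = dict(s.split('=', 1) for s in old_strings)
    result = {'added': [], 'changed': [], 'deleted': []}
    for opt in new_options:
        if opt['option'] not in old:
            result['added'].append(opt)
        elif opt['value'] != old[opt['option']]:
            result['changed'].append(opt)
        old.pop(opt['option'], None)
    result['deleted'] = [{'option': name} for name in old]
    return result

# diff_options(['delimiter=,', 'header=true'],
#              [{'option': 'header', 'value': 'false'}])
# -> {'added': [], 'changed': [{'option': 'header', 'value': 'false'}],
#     'deleted': [{'option': 'delimiter'}]}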
@check_precondition
|
||||
def dependents(self, gid, sid, did, scid, foid):
|
||||
|
@ -10,5 +10,9 @@ LEFT OUTER JOIN
|
||||
LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=c.oid AND des.classoid='pg_class'::regclass)
|
||||
WHERE
|
||||
{% if scid %}
|
||||
c.relnamespace = {{scid}}::oid
|
||||
{% elif foid %}
|
||||
c.oid = {{foid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY c.relname;
|
||||
|
@ -10,5 +10,9 @@ LEFT OUTER JOIN
|
||||
LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=c.oid AND des.classoid='pg_class'::regclass)
|
||||
WHERE
|
||||
{% if scid %}
|
||||
c.relnamespace = {{scid}}::oid
|
||||
{% elif foid %}
|
||||
c.oid = {{foid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY c.relname;
|
||||
|
@ -10,5 +10,9 @@ LEFT OUTER JOIN
|
||||
LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=c.oid AND des.classoid='pg_class'::regclass)
|
||||
WHERE
|
||||
{% if scid %}
|
||||
c.relnamespace = {{scid}}::oid
|
||||
{% elif foid %}
|
||||
c.oid = {{foid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY c.relname;
|
||||
|
@ -314,13 +314,14 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
cfgid=cfgid
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
if len(rset['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Configuration node.
|
||||
"""))
|
||||
return gone(
|
||||
_("""Could not find the FTS Configuration node.""")
|
||||
)
|
||||
|
||||
for row in rset['rows']:
|
||||
return make_json_response(
|
||||
@ -357,22 +358,22 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Configuration node.
|
||||
"""))
|
||||
return gone(
|
||||
_("Could not find the FTS Configuration node in the database node.")
|
||||
)
|
||||
|
||||
# In edit mode fetch token/dictionary list also
|
||||
if cfgid:
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'tokenDictList.sql']),
|
||||
cfgid=cfgid)
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'tokenDictList.sql']),
|
||||
cfgid=cfgid
|
||||
)
|
||||
|
||||
status, rset = self.conn.execute_dict(sql)
|
||||
status, rset = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
res['rows'][0]['tokens'] = rset['rows']
|
||||
res['rows'][0]['tokens'] = rset['rows']
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
@ -418,53 +419,52 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'schema.sql']),
|
||||
data=data,
|
||||
conn=self.conn,
|
||||
)
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'schema.sql']),
|
||||
data=data,
|
||||
conn=self.conn,
|
||||
)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name before passing to create.sql
|
||||
# To generate proper sql query
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
# Replace schema oid with schema name before passing to create.sql
|
||||
# To generate proper sql query
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn,
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn,
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# We need the cfgid to add the object to the browser tree;
# the SQL below returns it
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
name=data['name'],
|
||||
scid=data['schema']
|
||||
)
|
||||
status, res = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
res = res['rows'][0]
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
res['oid'],
|
||||
data['schema'],
|
||||
data['name'],
|
||||
icon="icon-fts_configuration"
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# We need the cfgid to add the object to the browser tree;
# the SQL below returns it
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
name=data['name']
|
||||
)
|
||||
status, cfgid = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=cfgid)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
cfgid,
|
||||
did,
|
||||
data['name'],
|
||||
icon="icon-fts_configuration"
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
current_app.logger.exception(e)
|
||||
return internal_server_error(errormsg=str(e))
|
||||
)
|
||||
|
||||
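create() and get_sql() both swap the schema oid sent by the client for the schema name before rendering the templates, because the DDL needs a name while the tree works with oids. A toy version of that swap, with the lookup left abstract (lookup stands in for the schema.sql query above):

def with_schema_name(data, lookup):
    new_data = dict(data)
    if 'schema' in new_data:
        # e.g. lookup(2200) -> 'public'
        new_data['schema'] = lookup(new_data['schema'])
    return new_data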
@check_precondition
|
||||
def update(self, gid, sid, did, scid, cfgid):
|
||||
@ -479,60 +479,37 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
|
||||
# Fetch sql query to update fts Configuration
|
||||
sql = self.get_sql(gid, sid, did, scid, data, cfgid)
|
||||
try:
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
sql, name = self.get_sql(gid, sid, did, scid, data, cfgid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if cfgid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'nodes.sql']),
|
||||
cfgid=cfgid,
|
||||
scid=scid
|
||||
)
|
||||
if cfgid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'nodes.sql']),
|
||||
cfgid=cfgid,
|
||||
scid=data['schema'] if 'schema' in data else scid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Configuration node to update.
|
||||
"""))
|
||||
|
||||
data = res['rows'][0]
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="FTS Configuration Updated.",
|
||||
data={
|
||||
'id': cfgid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did,
|
||||
'scid': scid
|
||||
}
|
||||
)
|
||||
# In case FTS Configuration node is not present
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': cfgid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did,
|
||||
'scid': scid
|
||||
}
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
_("Could not find the FTS Configuration node to update.")
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
current_app.logger.exception(e)
|
||||
return internal_server_error(errormsg=str(e))
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
cfgid,
|
||||
data['schema'] if 'schema' in data else scid,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
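When the update moved the configuration to another schema, that schema oid becomes the parent of the returned node; otherwise the original scid is kept. The helper below simply restates the fallback from the jsonify() call above:

def parent_schema(data, scid):
    # Prefer the schema supplied in the update payload, fall back to the old one.
    return data['schema'] if 'schema' in data else scid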
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, cfgid):
|
||||
@ -617,20 +594,18 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
data[k] = json.loads(v, encoding='utf-8')
|
||||
except ValueError:
|
||||
data[k] = v
|
||||
try:
|
||||
# Fetch sql query for modified data
|
||||
SQL, name = self.get_sql(gid, sid, did, scid, data, cfgid)
|
||||
if SQL == '':
|
||||
SQL = "-- No change"
|
||||
|
||||
# Fetch sql query for modified data
|
||||
sql = self.get_sql(gid, sid, did, scid, data, cfgid)
|
||||
|
||||
if isinstance(sql, str) and sql and sql.strip('\n') and sql.strip(' '):
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
data="--modified SQL",
|
||||
status=200
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, did, scid, data, cfgid=None):
|
||||
"""
|
||||
@ -641,87 +616,84 @@ class FtsConfigurationView(PGChildNodeView):
|
||||
:param scid: schema id
|
||||
:param cfgid: fts Configuration id
|
||||
"""
|
||||
try:
|
||||
# Fetch sql for update
|
||||
if cfgid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
cfgid=cfgid,
|
||||
scid=scid
|
||||
)
|
||||
# Fetch sql for update
|
||||
if cfgid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
cfgid=cfgid,
|
||||
scid=scid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Configuration node.
|
||||
"""))
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Configuration node.
|
||||
"""))
|
||||
|
||||
old_data = res['rows'][0]
|
||||
old_data = res['rows'][0]
|
||||
|
||||
# If user has changed the schema then fetch new schema directly
|
||||
# using its oid otherwise fetch old schema name using its oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
# If user has changed the schema then fetch new schema directly
|
||||
# using its oid otherwise fetch old schema name using its oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
|
||||
status, new_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=new_schema)
|
||||
status, new_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=new_schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
if 'schema' in new_data:
|
||||
new_data['schema'] = new_schema
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
if 'schema' in new_data:
|
||||
new_data['schema'] = new_schema
|
||||
|
||||
# Fetch old schema name using old schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=old_data
|
||||
)
|
||||
# Fetch old schema name using old schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=old_data
|
||||
)
|
||||
|
||||
status, old_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=old_schema)
|
||||
status, old_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=old_schema)
|
||||
|
||||
# Replace old schema oid with old schema name
|
||||
old_data['schema'] = old_schema
|
||||
# Replace old schema oid with old schema name
|
||||
old_data['schema'] = old_schema
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=new_data, o_data=old_data
|
||||
)
|
||||
# Fetch sql query for modified data
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=new_data, o_data=old_data
|
||||
)
|
||||
# Fetch sql query for modified data
|
||||
return str(sql.strip('\n')), data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data
|
||||
)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
|
||||
if 'name' in new_data and \
|
||||
'schema' in new_data:
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn
|
||||
)
|
||||
else:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data
|
||||
)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
|
||||
if 'name' in new_data and \
|
||||
'schema' in new_data:
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn
|
||||
)
|
||||
else:
|
||||
sql = "-- incomplete definition"
|
||||
return str(sql.strip('\n'))
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
sql = "-- incomplete definition"
|
||||
return str(sql.strip('\n')), data['name']
|
||||
|
||||
@check_precondition
|
||||
def parsers(self, gid, sid, did, scid):
|
||||
|
@ -16,10 +16,11 @@ FROM
WHERE
{% if scid %}
cfg.cfgnamespace = {{scid}}::OID
{% elif name %}
cfg.cfgname = {{name|qtLiteral}}
{% endif %}
{% if name %}
{% if scid %}AND {% endif %}cfg.cfgname = {{name|qtLiteral}}
{% endif %}
{% if cfgid %}
AND cfg.oid = {{cfgid}}::OID
{% if scid %}AND {% else %}{% if name %}AND {% endif %}{% endif %}cfg.oid = {{cfgid}}::OID
{% endif %}
ORDER BY cfg.cfgname
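The revised WHERE block above only emits AND when an earlier predicate was actually rendered, so any combination of scid, name and cfgid produces valid SQL. A small stand-alone sketch of the same composition rule, using plain Jinja2 (qtLiteral is pgAdmin's own filter, so a plainly quoted literal stands in for it here; all values are illustrative):

    from jinja2 import Template

    # Simplified stand-in for the template above, same AND-composition logic.
    tmpl = Template(
        "WHERE\n"
        "{% if scid %}    cfg.cfgnamespace = {{ scid }}::OID\n{% endif %}"
        "{% if name %}    {% if scid %}AND {% endif %}cfg.cfgname = '{{ name }}'\n{% endif %}"
        "{% if cfgid %}    {% if scid or name %}AND {% endif %}cfg.oid = {{ cfgid }}::OID\n{% endif %}"
        "ORDER BY cfg.cfgname"
    )

    print(tmpl.render(scid=2200, name='english_cfg'))
    # WHERE
    #     cfg.cfgnamespace = 2200::OID
    #     AND cfg.cfgname = 'english_cfg'
    # ORDER BY cfg.cfgname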
|
@ -41,11 +41,11 @@ ALTER TEXT SEARCH CONFIGURATION {{conn|qtIdent(o_data.schema)}}.{{conn|qtIdent(n
|
||||
{% if 'schema' in data and data.schema != o_data.schema %}
|
||||
{% set schema = data.schema%}
|
||||
ALTER TEXT SEARCH CONFIGURATION {{conn|qtIdent(o_data.schema)}}.{{conn|qtIdent(name)}}
|
||||
SET SCHEMA {{data.schema}};
|
||||
SET SCHEMA {{conn|qtIdent(data.schema)}};
|
||||
|
||||
{% endif %}
|
||||
{% if 'description' in data and data.description != o_data.description %}
|
||||
COMMENT ON TEXT SEARCH CONFIGURATION {{conn|qtIdent(schema)}}.{{conn|qtIdent(name)}}
|
||||
IS {{ data.description|qtLiteral }};
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
@ -300,7 +300,7 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
res.append(
|
||||
self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
scid,
|
||||
row['name'],
|
||||
icon="icon-fts_dictionary"
|
||||
))
|
||||
@ -332,15 +332,13 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
if len(rset['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Dictionary node.
|
||||
"""))
|
||||
return gone(_("Could not find the FTS Dictionary node."))
|
||||
|
||||
for row in rset['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
dcid,
|
||||
row['schema'],
|
||||
row['name'],
|
||||
icon="icon-fts_dictionary"
|
||||
),
|
||||
@ -372,7 +370,7 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Dictionary node.
|
||||
Could not find the FTS Dictionary node in the database node.
|
||||
"""))
|
||||
|
||||
if res['rows'][0]['options'] is not None:
|
||||
@ -408,54 +406,51 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=_(
|
||||
"Could not find the required parameter (%s)." % arg
|
||||
)
|
||||
errormsg=_("Could not find the required parameter (%s)." % arg)
|
||||
)
|
||||
try:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template("/".join([self.template_path, 'schema.sql']),
|
||||
data=data,
|
||||
conn=self.conn,
|
||||
)
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data,
|
||||
conn=self.conn,
|
||||
)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name before passing to create.sql
|
||||
# To generate proper sql query
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn,
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
# Replace schema oid with schema name before passing to create.sql
|
||||
# To generate proper sql query
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn,
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# We need dcid to add object in tree at browser,
|
||||
# Below sql will give the same
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
name=data['name']
|
||||
# We need dcid to add object in tree at browser,
|
||||
# Below sql will give the same
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
name=data['name'],
|
||||
scid=data['schema']
|
||||
)
|
||||
status, dcid = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=dcid)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
dcid,
|
||||
data['schema'],
|
||||
data['name'],
|
||||
icon="icon-fts_dictionary"
|
||||
)
|
||||
status, dcid = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=dcid)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
dcid,
|
||||
did,
|
||||
data['name'],
|
||||
icon="icon-fts_dictionary"
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
current_app.logger.exception(e)
|
||||
return internal_server_error(errormsg=str(e))
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def update(self, gid, sid, did, scid, dcid):
|
||||
@ -472,55 +467,35 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
)
|
||||
|
||||
# Fetch sql query to update fts dictionary
|
||||
sql = self.get_sql(gid, sid, did, scid, data, dcid)
|
||||
try:
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
sql, name = self.get_sql(gid, sid, did, scid, data, dcid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if dcid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
dcid=dcid,
|
||||
scid=scid
|
||||
)
|
||||
if dcid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
dcid=dcid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Dictionary node to update.
|
||||
"""))
|
||||
|
||||
data = res['rows'][0]
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
dcid,
|
||||
did,
|
||||
data['name'],
|
||||
icon="icon-fts_dictionary"
|
||||
)
|
||||
)
|
||||
# In case FTS Dictionary node is not present
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': dcid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did,
|
||||
'scid': scid
|
||||
}
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
_("Could not find the FTS Dictionary node to update.")
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
current_app.logger.exception(e)
|
||||
return internal_server_error(errormsg=str(e))
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
dcid,
|
||||
res['rows'][0]['schema'],
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, dcid):
|
||||
@ -604,19 +579,18 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
except ValueError:
|
||||
data[k] = v
|
||||
|
||||
# Fetch sql query for modified data
|
||||
sql = self.get_sql(gid, sid, did, scid, data, dcid)
|
||||
try:
|
||||
# Fetch sql query for modified data
|
||||
SQL, name = self.get_sql(gid, sid, did, scid, data, dcid)
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
|
||||
if isinstance(sql, str) and sql and sql.strip('\n') and sql.strip(' '):
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
data="--modified SQL",
|
||||
status=200
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, did, scid, data, dcid=None):
|
||||
"""
|
||||
@ -627,85 +601,83 @@ class FtsDictionaryView(PGChildNodeView):
|
||||
:param scid: schema id
|
||||
:param dcid: fts dictionary id
|
||||
"""
|
||||
try:
|
||||
# Fetch sql for update
|
||||
if dcid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
dcid=dcid,
|
||||
scid=scid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
# Fetch sql for update
|
||||
if dcid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
dcid=dcid,
|
||||
scid=scid
|
||||
)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Dictionary node.
|
||||
"""))
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Dictionary node.
|
||||
"""))
|
||||
|
||||
# If user has changed the schema then fetch new schema directly
|
||||
# using its oid otherwise fetch old schema name using its oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
old_data = res['rows'][0]
|
||||
|
||||
status, new_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=new_schema)
|
||||
# If user has changed the schema then fetch new schema directly
|
||||
# using its oid otherwise fetch old schema name using its oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
if 'schema' in new_data:
|
||||
new_data['schema'] = new_schema
|
||||
status, new_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=new_schema)
|
||||
|
||||
# Fetch old schema name using old schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=old_data)
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
if 'schema' in new_data:
|
||||
new_data['schema'] = new_schema
|
||||
|
||||
status, old_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=old_schema)
|
||||
# Fetch old schema name using old schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=old_data)
|
||||
|
||||
# Replace old schema oid with old schema name
|
||||
old_data['schema'] = old_schema
|
||||
status, old_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=old_schema)
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=new_data, o_data=old_data
|
||||
)
|
||||
# Fetch sql query for modified data
|
||||
# Replace old schema oid with old schema name
|
||||
old_data['schema'] = old_schema
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=new_data, o_data=old_data
|
||||
)
|
||||
# Fetch sql query for modified data
|
||||
return str(sql.strip('\n')), data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template("/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
|
||||
if 'template' in new_data and \
|
||||
'name' in new_data and \
|
||||
'schema' in new_data:
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn
|
||||
)
|
||||
else:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template("/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
|
||||
if 'template' in new_data and \
|
||||
'name' in new_data and \
|
||||
'schema' in new_data:
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn
|
||||
)
|
||||
else:
|
||||
sql = "-- incomplete definition"
|
||||
return str(sql.strip('\n'))
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
sql = "-- incomplete definition"
|
||||
return str(sql.strip('\n')), data['name']
|
||||
|
||||
@check_precondition
|
||||
def fetch_templates(self, gid, sid, did, scid):
|
||||
|
@ -14,11 +14,12 @@ FROM
|
||||
WHERE
|
||||
{% if scid %}
|
||||
dict.dictnamespace = {{scid}}::OID
|
||||
{% elif name %}
|
||||
dict.dictname = {{name|qtLiteral}}
|
||||
{% endif %}
|
||||
{% if name %}
|
||||
{% if scid %}AND {% endif %}dict.dictname = {{name|qtLiteral}}
|
||||
{% endif %}
|
||||
{% if dcid %}
|
||||
AND dict.oid = {{dcid}}::OID
|
||||
{% if scid %}AND {% else %}{% if name %}AND {% endif %}{% endif %}dict.oid = {{dcid}}::OID
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
dict.dictname
|
||||
dict.dictname
|
||||
|
@ -277,15 +277,13 @@ class FtsParserView(PGChildNodeView):
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
if len(rset['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Parser node.
|
||||
"""))
|
||||
return gone(_("Could not find the FTS Parser node."))
|
||||
|
||||
for row in rset['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
row['schema'],
|
||||
row['name'],
|
||||
icon="icon-fts_parser"
|
||||
),
|
||||
@ -306,7 +304,7 @@ class FtsParserView(PGChildNodeView):
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Parser node.
|
||||
Could not find the FTS Parser node in the database node.
|
||||
"""))
|
||||
|
||||
return ajax_response(
|
||||
@ -346,52 +344,49 @@ class FtsParserView(PGChildNodeView):
|
||||
"Could not find the required parameter (%s)." % arg
|
||||
)
|
||||
)
|
||||
try:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data,
|
||||
conn=self.conn,
|
||||
)
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data,
|
||||
conn=self.conn,
|
||||
)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# replace schema oid with schema name before passing to create.sql
|
||||
# to generate proper sql query
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn,
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
# replace schema oid with schema name before passing to create.sql
|
||||
# to generate proper sql query
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn,
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# we need fts_parser id to add object in tree at browser,
|
||||
# below sql will give the same
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
name=data['name']
|
||||
)
|
||||
status, pid = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=pid)
|
||||
# we need fts_parser id to add object in tree at browser,
|
||||
# below sql will give the same
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
name=data['name'],
|
||||
scid=data['schema'] if 'schema' in data else scid
|
||||
)
|
||||
status, pid = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=pid)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
pid,
|
||||
did,
|
||||
data['name'],
|
||||
icon="icon-fts_parser"
|
||||
)
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
pid,
|
||||
data['schema'] if 'schema' in data else scid,
|
||||
data['name'],
|
||||
icon="icon-fts_parser"
|
||||
)
|
||||
except Exception as e:
|
||||
current_app.logger.exception(e)
|
||||
return internal_server_error(errormsg=str(e))
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def update(self, gid, sid, did, scid, pid):
|
||||
@ -406,51 +401,38 @@ class FtsParserView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
|
||||
# Fetch sql query to update fts parser
|
||||
sql = self.get_sql(gid, sid, did, scid, data, pid)
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
sql, name = self.get_sql(gid, sid, did, scid, data, pid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if pid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
pid=pid,
|
||||
scid=data['schema'] if 'schema' in data else scid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if pid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
pid=pid,
|
||||
scid=scid
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
_("Could not find the FTS Parser node to update.")
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""
|
||||
Could not find the FTS Parser node to update.
|
||||
"""))
|
||||
|
||||
data = res['rows'][0]
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
pid,
|
||||
did,
|
||||
data['name'],
|
||||
icon="icon-fts_parser"
|
||||
)
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': pid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
pid,
|
||||
data['schema'] if 'schema' in data else scid,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, pid):
|
||||
@ -530,20 +512,19 @@ class FtsParserView(PGChildNodeView):
|
||||
:param pid: fts parser id
|
||||
"""
|
||||
data = request.args
|
||||
|
||||
# Fetch sql query for modified data
|
||||
sql = self.get_sql(gid, sid, did, scid, data, pid)
|
||||
try:
|
||||
# Fetch sql query for modified data
|
||||
SQL, name = self.get_sql(gid, sid, did, scid, data, pid)
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
|
||||
if isinstance(sql, str) and sql and sql.strip('\n') and sql.strip(' '):
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
data="--modified SQL",
|
||||
status=200
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, did, scid, data, pid=None):
|
||||
"""
|
||||
@ -554,91 +535,88 @@ class FtsParserView(PGChildNodeView):
|
||||
:param scid: schema id
|
||||
:param pid: fts parser id
|
||||
"""
|
||||
try:
|
||||
# Fetch sql for update
|
||||
if pid is not None:
|
||||
|
||||
# Fetch sql for update
|
||||
if pid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
pid=pid,
|
||||
scid=scid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("Could not find the FTS Parser node."))
|
||||
|
||||
old_data = res['rows'][0]
|
||||
|
||||
# If user has changed the schema then fetch new schema directly
|
||||
# using its oid otherwise fetch old schema name with parser oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
|
||||
status, new_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=new_schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
if 'schema' in new_data:
|
||||
new_data['schema'] = new_schema
|
||||
|
||||
# Fetch old schema name using old schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=old_data
|
||||
)
|
||||
|
||||
status, old_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=old_schema)
|
||||
|
||||
# Replace old schema oid with old schema name
|
||||
old_data['schema'] = old_schema
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=new_data,
|
||||
o_data=old_data
|
||||
)
|
||||
# Fetch sql query for modified data
|
||||
return str(sql.strip('\n')), data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data
|
||||
)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
|
||||
if 'prsstart' in new_data and \
|
||||
'prstoken' in new_data and \
|
||||
'prsend' in new_data and \
|
||||
'prslextype' in new_data and \
|
||||
'name' in new_data and \
|
||||
'schema' in new_data:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
pid=pid,
|
||||
scid=scid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("Could not find the FTS Parser node."))
|
||||
|
||||
old_data = res['rows'][0]
|
||||
|
||||
# If user has changed the schema then fetch new schema directly
|
||||
# using its oid otherwise fetch old schema name with parser oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
|
||||
status, new_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=new_schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
if 'schema' in new_data:
|
||||
new_data['schema'] = new_schema
|
||||
|
||||
# Fetch old schema name using old schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=old_data
|
||||
)
|
||||
|
||||
status, old_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=old_schema)
|
||||
|
||||
# Replace old schema oid with old schema name
|
||||
old_data['schema'] = old_schema
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
o_data=old_data
|
||||
conn=self.conn
|
||||
)
|
||||
# Fetch sql query for modified data
|
||||
else:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data
|
||||
)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
|
||||
if 'prsstart' in new_data and \
|
||||
'prstoken' in new_data and \
|
||||
'prsend' in new_data and \
|
||||
'prslextype' in new_data and \
|
||||
'name' in new_data and \
|
||||
'schema' in new_data:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn
|
||||
)
|
||||
else:
|
||||
sql = "-- incomplete definition"
|
||||
return str(sql.strip('\n'))
|
||||
|
||||
except Exception as e:
|
||||
current_app.logger.exception(e)
|
||||
return internal_server_error(errormsg=str(e))
|
||||
sql = "-- incomplete definition"
|
||||
return str(sql.strip('\n')), data['name']
|
||||
|
||||
@check_precondition
|
||||
def start_functions(self, gid, sid, did, scid):
|
||||
|
@ -1,6 +1,6 @@
|
||||
{# FETCH FTS PARSER name statement #}
|
||||
SELECT
|
||||
oid, prsname as name
|
||||
oid, prsname as name, prs.prsnamespace AS schema
|
||||
FROM
|
||||
pg_ts_parser prs
|
||||
WHERE
|
||||
@ -10,4 +10,4 @@ WHERE
|
||||
prs.oid = {{pid}}::OID
|
||||
{% endif %}
|
||||
|
||||
ORDER BY name
|
||||
ORDER BY name
|
||||
|
@ -20,11 +20,11 @@ ON
|
||||
WHERE
|
||||
{% if scid %}
|
||||
prs.prsnamespace = {{scid}}::OID
|
||||
{% elif name %}
|
||||
prs.prsname = {{name|qtLiteral}}
|
||||
{% endif %}
|
||||
{% if name %}
|
||||
{% if scid %}AND {% endif %}prs.prsname = {{name|qtLiteral}}
|
||||
{% endif %}
|
||||
{% if pid %}
|
||||
AND prs.oid = {{pid}}::OID
|
||||
{% if name %}AND {% else %}{% if scid %}AND {% endif %}{% endif %}prs.oid = {{pid}}::OID
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
prs.prsname
|
||||
ORDER BY prs.prsname;
|
||||
|
@ -244,7 +244,7 @@ class FtsTemplateView(PGChildNodeView):
|
||||
res.append(
|
||||
self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
scid,
|
||||
row['name'],
|
||||
icon="icon-fts_template"
|
||||
))
|
||||
@ -268,7 +268,7 @@ class FtsTemplateView(PGChildNodeView):
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
row['schema'],
|
||||
row['name'],
|
||||
icon="icon-fts_template"
|
||||
),
|
||||
@ -290,6 +290,9 @@ class FtsTemplateView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the FTS template node in the database."""))
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'][0],
|
||||
status=200
|
||||
@ -324,49 +327,47 @@ class FtsTemplateView(PGChildNodeView):
|
||||
"Could not find the required parameter (%s)." % arg
|
||||
)
|
||||
)
|
||||
try:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template("/".join([self.template_path, 'schema.sql']),
|
||||
data=data,
|
||||
conn=self.conn,
|
||||
)
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template("/".join([self.template_path, 'schema.sql']),
|
||||
data=data,
|
||||
conn=self.conn,
|
||||
)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# replace schema oid with schema name before passing to create.sql
|
||||
# to generate proper sql query
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn,
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
# replace schema oid with schema name before passing to create.sql
|
||||
# to generate proper sql query
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn,
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# we need fts_template id to add object in tree at browser,
|
||||
# below sql will give the same
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
name=data['name']
|
||||
# we need fts_template id to add object in tree at browser,
|
||||
# below sql will give the same
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
name=data['name'],
|
||||
scid=data['schema'] if 'schema' in data else scid
|
||||
)
|
||||
status, tid = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=tid)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
tid,
|
||||
data['schema'] if 'schema' in data else scid,
|
||||
data['name'],
|
||||
icon="icon-fts_template"
|
||||
)
|
||||
status, tid = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=tid)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
tid,
|
||||
did,
|
||||
data['name'],
|
||||
icon="icon-fts_template"
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def update(self, gid, sid, did, scid, tid):
|
||||
@ -383,39 +384,30 @@ class FtsTemplateView(PGChildNodeView):
|
||||
)
|
||||
|
||||
# Fetch sql query to update fts template
|
||||
sql = self.get_sql(gid, sid, did, scid, data, tid)
|
||||
try:
|
||||
if sql and sql.strip('\n') and sql.strip(' '):
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
sql, name = self.get_sql(gid, sid, did, scid, data, tid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="FTS Template updated",
|
||||
data={
|
||||
'id': tid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did,
|
||||
'scid': scid
|
||||
}
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': tid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did,
|
||||
'scid': scid
|
||||
}
|
||||
)
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'nodes.sql']),
|
||||
tid=tid
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
rset = rset['rows'][0]
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
tid,
|
||||
rset['schema'],
|
||||
rset['name'],
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, tid):
|
||||
@ -434,51 +426,47 @@ class FtsTemplateView(PGChildNodeView):
|
||||
else:
|
||||
cascade = False
|
||||
|
||||
try:
|
||||
# Get name for template from tid
|
||||
sql = render_template("/".join([self.template_path, 'delete.sql']),
|
||||
tid=tid)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if not res['rows']:
|
||||
return make_json_response(
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
),
|
||||
info=gettext(
|
||||
'The specified FTS template could not be found.\n'
|
||||
)
|
||||
)
|
||||
|
||||
# Drop fts template
|
||||
result = res['rows'][0]
|
||||
sql = render_template("/".join([self.template_path, 'delete.sql']),
|
||||
name=result['name'],
|
||||
schema=result['schema'],
|
||||
cascade=cascade
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
# Get name for template from tid
|
||||
sql = render_template("/".join([self.template_path, 'delete.sql']),
|
||||
tid=tid)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if not res['rows']:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info=gettext("FTS template dropped"),
|
||||
data={
|
||||
'id': tid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did,
|
||||
'scid': scid
|
||||
}
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
'Error: Object not found.'
|
||||
),
|
||||
info=gettext(
|
||||
'The specified FTS template could not be found.\n'
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
# Drop fts template
|
||||
result = res['rows'][0]
|
||||
sql = render_template("/".join([self.template_path, 'delete.sql']),
|
||||
name=result['name'],
|
||||
schema=result['schema'],
|
||||
cascade=cascade
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info=gettext("FTS template dropped"),
|
||||
data={
|
||||
'id': tid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did,
|
||||
'scid': scid
|
||||
}
|
||||
)
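The delete handler above renders delete.sql twice: first with tid to look up the template's name and schema, then with name, schema and cascade to emit the actual DROP. The second pass has to produce standard PostgreSQL DROP TEXT SEARCH TEMPLATE syntax; the exact template output is not shown in this diff, so the following is only a sketch of the expected statement, built in Python with placeholder values:

    # Assumed shape of the rendered DROP statement (PostgreSQL syntax);
    # the real text comes from delete.sql at runtime.
    name, schema, cascade = 'my_template', 'public', True
    sql = 'DROP TEXT SEARCH TEMPLATE IF EXISTS "{0}"."{1}"{2};'.format(
        schema, name, ' CASCADE' if cascade else ''
    )
    print(sql)  # DROP TEXT SEARCH TEMPLATE IF EXISTS "public"."my_template" CASCADE;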
|
||||
|
||||
@check_precondition
|
||||
def msql(self, gid, sid, did, scid, tid=None):
|
||||
@ -493,17 +481,14 @@ class FtsTemplateView(PGChildNodeView):
|
||||
data = request.args
|
||||
|
||||
# Fetch sql query for modified data
|
||||
sql = self.get_sql(gid, sid, did, scid, data, tid)
|
||||
# Fetch sql query for modified data
|
||||
SQL, name = self.get_sql(gid, sid, did, scid, data, tid)
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
|
||||
if isinstance(sql, str) and sql and sql.strip('\n') and sql.strip(' '):
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
status=200
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
data="--modified SQL",
|
||||
status=200
|
||||
return make_json_response(
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
|
||||
def get_sql(self, gid, sid, did, scid, data, tid=None):
|
||||
@ -515,80 +500,78 @@ class FtsTemplateView(PGChildNodeView):
|
||||
:param scid: schema id
|
||||
:param tid: fts template id
|
||||
"""
|
||||
try:
|
||||
# Fetch sql for update
|
||||
if tid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
tid=tid,
|
||||
scid=scid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
# Fetch sql for update
|
||||
if tid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
tid=tid,
|
||||
scid=scid
|
||||
)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# If user has changed the schema then fetch new schema directly
|
||||
# using its oid otherwise fetch old schema name using fts template oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
old_data = res['rows'][0]
|
||||
|
||||
status, new_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=new_schema)
|
||||
# If user has changed the schema then fetch new schema directly
|
||||
# using its oid otherwise fetch old schema name using fts template oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
if 'schema' in new_data:
|
||||
new_data['schema'] = new_schema
|
||||
status, new_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=new_schema)
|
||||
|
||||
# Fetch old schema name using old schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=old_data)
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
if 'schema' in new_data:
|
||||
new_data['schema'] = new_schema
|
||||
|
||||
status, old_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=old_schema)
|
||||
# Fetch old schema name using old schema oid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'schema.sql']),
|
||||
data=old_data)
|
||||
|
||||
# Replace old schema oid with old schema name
|
||||
old_data['schema'] = old_schema
|
||||
status, old_schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=old_schema)
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=new_data, o_data=old_data
|
||||
)
|
||||
# Fetch sql query for modified data
|
||||
# Replace old schema oid with old schema name
|
||||
old_data['schema'] = old_schema
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=new_data, o_data=old_data
|
||||
)
|
||||
# Fetch sql query for modified data
|
||||
return str(sql.strip('\n')), data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template("/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
|
||||
if 'tmpllexize' in new_data and \
|
||||
'name' in new_data and \
|
||||
'schema' in new_data:
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn
|
||||
)
|
||||
else:
|
||||
# Fetch schema name from schema oid
|
||||
sql = render_template("/".join([self.template_path, 'schema.sql']),
|
||||
data=data)
|
||||
|
||||
status, schema = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=schema)
|
||||
|
||||
# Replace schema oid with schema name
|
||||
new_data = data.copy()
|
||||
new_data['schema'] = schema
|
||||
|
||||
if 'tmpllexize' in new_data and \
|
||||
'name' in new_data and \
|
||||
'schema' in new_data:
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=new_data,
|
||||
conn=self.conn
|
||||
)
|
||||
else:
|
||||
sql = "-- incomplete definition"
|
||||
return str(sql.strip('\n'))
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
sql = "-- incomplete definition"
|
||||
return str(sql.strip('\n')), data['name']
|
||||
|
||||
@check_precondition
|
||||
def get_lexize(self, gid, sid, did, scid, tid=None):
|
||||
@ -657,32 +640,28 @@ class FtsTemplateView(PGChildNodeView):
|
||||
:param scid: schema id
|
||||
:param tid: fts template id
|
||||
"""
|
||||
try:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'sql.sql']),
|
||||
tid=tid,
|
||||
scid=scid,
|
||||
conn=self.conn
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'sql.sql']),
|
||||
tid=tid,
|
||||
scid=scid,
|
||||
conn=self.conn
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(
|
||||
_(
|
||||
"Could not generate reversed engineered Query for the FTS Template.\n{0}").format(
|
||||
res
|
||||
)
|
||||
)
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(
|
||||
_(
|
||||
"Could not generate reversed engineered Query for the FTS Template.\n{0}").format(
|
||||
res
|
||||
)
|
||||
)
|
||||
|
||||
if res is None:
|
||||
return gone(
|
||||
_(
|
||||
"Could not generate reversed engineered Query for FTS Template node.")
|
||||
)
|
||||
if res is None:
|
||||
return gone(
|
||||
_(
|
||||
"Could not generate reversed engineered Query for FTS Template node.")
|
||||
)
|
||||
|
||||
return ajax_response(response=res)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
return ajax_response(response=res)
|
||||
|
||||
@check_precondition
|
||||
def dependents(self, gid, sid, did, scid, tid):
|
||||
|
@ -1,5 +1,5 @@
|
||||
SELECT
|
||||
oid, tmplname as name
|
||||
oid, tmplname as name, tmpl.tmplnamespace AS schema
|
||||
FROM
|
||||
pg_ts_template tmpl
|
||||
WHERE
|
||||
@ -9,4 +9,4 @@ WHERE
|
||||
tmpl.oid = {{tid}}::OID
|
||||
{% endif %}
|
||||
|
||||
ORDER BY name
|
||||
ORDER BY name
|
||||
|
@ -17,11 +17,12 @@ ON
|
||||
WHERE
|
||||
{% if scid %}
|
||||
tmpl.tmplnamespace = {{scid}}::OID
|
||||
{% elif name %}
|
||||
tmpl.tmplname = {{name|qtLiteral}}
|
||||
{% endif %}
|
||||
{% if name %}
|
||||
{% if scid %}AND {% endif %}tmpl.tmplname = {{name|qtLiteral}}
|
||||
{% endif %}
|
||||
{% if tid %}
|
||||
AND tmpl.oid = {{tid}}::OID
|
||||
{% if name %}AND {% else %}{% if scid %}AND {% endif %}{% endif %}tmpl.oid = {{tid}}::OID
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
tmpl.tmplname
|
||||
tmpl.tmplname
|
||||
|
@ -13,10 +13,10 @@ ALTER TEXT SEARCH TEMPLATE {{conn|qtIdent(o_data.schema)}}.{{conn|qtIdent(o_data
{% endif %}
{% if data.schema and data.schema != o_data.schema %}
ALTER TEXT SEARCH TEMPLATE {{conn|qtIdent(o_data.schema)}}.{{conn|qtIdent(name)}}
SET SCHEMA {{data.schema}};
SET SCHEMA {{conn|qtIdent(data.schema)}};
{% endif %}
{% if 'description' in data and data.description != o_data.description %}
COMMENT ON TEXT SEARCH TEMPLATE {{conn|qtIdent(o_data.schema)}}.{{conn|qtIdent(name)}}
IS {{ data.description|qtLiteral }};
{% endif %}
{% endif %}
{% endif %}
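In the hunk above the new schema name is now passed through qtIdent before being spliced into SET SCHEMA, so names that need quoting are emitted as proper identifiers. A rough sketch of what such a filter does (simplified; pgAdmin's real qtIdent lives in its driver layer and also handles reserved keywords):

    import re

    def quote_ident(name):
        # Leave plain lower-case identifiers alone, double-quote everything else,
        # doubling any embedded double quotes (illustrative, not pgAdmin's code).
        if re.match(r'^[a-z_][a-z0-9_$]*$', name):
            return name
        return '"' + name.replace('"', '""') + '"'

    print(quote_ident('fts_schema'))    # fts_schema
    print(quote_ident('My Schema'))     # "My Schema"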
@ -202,7 +202,7 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
],
|
||||
'delete': [{'delete': 'delete'}],
|
||||
'children': [{'get': 'children'}],
|
||||
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
|
||||
'nodes': [{'get': 'nodes'}, {'get': 'nodes'}],
|
||||
'sql': [{'get': 'sql'}],
|
||||
'msql': [{'get': 'msql'}, {'get': 'msql'}],
|
||||
'stats': [{'get': 'statistics'}, {'get': 'statistics'}],
|
||||
@ -269,31 +269,27 @@ class FunctionView(PGChildNodeView, DataTypeReader):
)
)

try:
list_params = []
if request.method == 'GET':
list_params = ['arguments', 'variables', 'proacl',
'seclabels', 'acl', 'args']
list_params = []
if request.method == 'GET':
list_params = ['arguments', 'variables', 'proacl',
'seclabels', 'acl', 'args']

for key in req:
if key in list_params and req[key] != '' \
and req[key] is not None:
# Converts string into python list as expected.
data[key] = json.loads(req[key], encoding='utf-8')
elif (
key == 'proretset' or key == 'proisstrict' or
key == 'prosecdef' or key == 'proiswindow' or
key == 'proleakproof'
):
data[key] = True if (
req[key] == 'true' or req[key] is True) \
else False if (req[key] == 'false' or
req[key] is False) else ''
else:
data[key] = req[key]

except Exception as e:
return internal_server_error(errormsg=str(e))
for key in req:
if key in list_params and req[key] != '' \
and req[key] is not None:
# Converts string into python list as expected.
data[key] = json.loads(req[key], encoding='utf-8')
elif (
key == 'proretset' or key == 'proisstrict' or
key == 'prosecdef' or key == 'proiswindow' or
key == 'proleakproof'
):
data[key] = True if (
req[key] == 'true' or req[key] is True) \
else False if (req[key] == 'false' or
req[key] is False) else ''
else:
data[key] = req[key]

self.request = data
return f(self, **kwargs)
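The coercion block above turns the 'true'/'false' strings that arrive for the boolean function flags into real booleans before the request is stored. A compact helper that performs the same mapping, shown only as a sketch (the name to_bool_flag is hypothetical, not part of this change):

    def to_bool_flag(value):
        # 'true'/True -> True, 'false'/False -> False, anything else -> ''
        if value == 'true' or value is True:
            return True
        if value == 'false' or value is False:
            return False
        return ''

    assert to_bool_flag('true') is True
    assert to_bool_flag('false') is False
    assert to_bool_flag('') == ''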
|
||||
@ -374,7 +370,7 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def nodes(self, gid, sid, did, scid):
|
||||
def nodes(self, gid, sid, did, scid, fnid=None):
|
||||
"""
|
||||
Returns all the Functions to generate the Nodes.
|
||||
|
||||
@ -386,13 +382,34 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
"""
|
||||
|
||||
res = []
|
||||
SQL = render_template("/".join([self.sql_template_path,
|
||||
'node.sql']), scid=scid)
|
||||
SQL = render_template(
|
||||
"/".join([self.sql_template_path, 'node.sql']),
|
||||
scid=scid,
|
||||
fnid=fnid
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
if fnid is not None:
|
||||
if len(rset['rows']) == 0:
|
||||
return gone(
|
||||
_("Couldn't find the specified %s").format(self.node_type)
|
||||
)
|
||||
|
||||
row = rset['rows'][0]
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
scid,
|
||||
row['name'],
|
||||
icon="icon-" + self.node_type,
|
||||
funcowner=row['funcowner'],
|
||||
language=row['lanname']
|
||||
)
|
||||
)
|
||||
|
||||
for row in rset['rows']:
|
||||
res.append(
|
||||
self.blueprint.generate_browser_node(
|
||||
@ -424,6 +441,9 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
|
||||
resp_data = self._fetch_properties(gid, sid, did, scid, fnid)
|
||||
|
||||
if len(resp_data) == 0:
|
||||
return gone(gettext("""Could not find the function node in the database."""))
|
||||
|
||||
return ajax_response(
|
||||
response=resp_data,
|
||||
status=200
|
||||
@ -717,38 +737,38 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
Function object in json format.
|
||||
"""
|
||||
|
||||
try:
|
||||
# Get SQL to create Function
|
||||
status, SQL = self._get_sql(gid, sid, did, scid, self.request)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=SQL)
|
||||
# Get SQL to create Function
|
||||
status, SQL = self._get_sql(gid, sid, did, scid, self.request)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=SQL)
|
||||
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
SQL = render_template("/".join([self.sql_template_path,
|
||||
'get_oid.sql']),
|
||||
nspname=self.request['pronamespace'],
|
||||
name=self.request['name'])
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
SQL = render_template(
|
||||
"/".join(
|
||||
[self.sql_template_path, 'get_oid.sql']
|
||||
),
|
||||
nspname=self.request['pronamespace'],
|
||||
name=self.request['name']
|
||||
)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
res = res['rows'][0]
|
||||
res = res['rows'][0]
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
res['oid'],
|
||||
self.request['pronamespace'],
|
||||
res['name'],
|
||||
icon="icon-" + self.node_type,
|
||||
language=res['lanname'],
|
||||
funcowner=res['funcowner']
|
||||
)
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
res['oid'],
|
||||
res['nsp'],
|
||||
res['name'],
|
||||
icon="icon-" + self.node_type,
|
||||
language=res['lanname'],
|
||||
funcowner=res['funcowner']
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, scid, fnid):
|
||||
@ -832,48 +852,44 @@ class FunctionView(PGChildNodeView, DataTypeReader):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=SQL)
|
||||
|
||||
try:
|
||||
if SQL and SQL.strip('\n') and SQL.strip(' '):
|
||||
if SQL and SQL.strip('\n') and SQL.strip(' '):
|
||||
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
resp_data = self._fetch_properties(gid, sid, did, scid, fnid)
|
||||
resp_data = self._fetch_properties(gid, sid, did, scid, fnid)
|
||||
|
||||
if self.node_type == 'procedure':
|
||||
obj_name = resp_data['name_with_args']
|
||||
elif self.node_type == 'function':
|
||||
args = resp_data['proargs'] if resp_data['proargs'] else ''
|
||||
obj_name = resp_data['name'] + '({0})'.format(args)
|
||||
else:
|
||||
obj_name = resp_data['name'] + '()'
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
fnid,
|
||||
resp_data['pronamespace'],
|
||||
obj_name,
|
||||
icon="icon-" + self.node_type,
|
||||
language=resp_data['lanname'],
|
||||
funcowner=resp_data['funcowner']
|
||||
)
|
||||
)
|
||||
if self.node_type == 'procedure':
|
||||
obj_name = resp_data['name_with_args']
|
||||
elif self.node_type == 'function':
|
||||
args = resp_data['proargs'] if resp_data['proargs'] else ''
|
||||
obj_name = resp_data['name'] + '({0})'.format(args)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update.",
|
||||
data={
|
||||
'id': fnid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
obj_name = resp_data['name'] + '()'
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
fnid,
|
||||
resp_data['pronamespace'],
|
||||
obj_name,
|
||||
icon="icon-" + self.node_type,
|
||||
language=resp_data['lanname'],
|
||||
funcowner=resp_data['funcowner']
|
||||
)
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update.",
|
||||
data={
|
||||
'id': fnid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, fnid=None):
|
||||
@@ -1025,160 +1041,157 @@ class FunctionView(PGChildNodeView, DataTypeReader):
            fnid: Function Id
        """

        try:
            vol_dict = {'v': 'VOLATILE', 's': 'STABLE', 'i': 'IMMUTABLE'}

            # Get Schema Name from its OID.
            if 'pronamespace' in data:
                data['pronamespace'] = self._get_schema(data[
                    'pronamespace'])
            if 'provolatile' in data:
                data['provolatile'] = vol_dict[data['provolatile']]

            if fnid is not None:
                # Edit Mode

                # Fetch Old Data from database.
                old_data = self._fetch_properties(gid, sid, did, scid, fnid)

                # Get Schema Name
                old_data['pronamespace'] = self._get_schema(old_data[
                    'pronamespace'])

                if 'provolatile' in old_data:
                    old_data['provolatile'] = vol_dict[old_data['provolatile']]

                # If any of the below argument is changed,
                # then CREATE OR REPLACE SQL statement should be called
                fun_change_args = ['lanname', 'prosrc', 'probin', 'prosrc_c',
                                   'provolatile', 'proisstrict', 'prosecdef',
                                   'procost', 'proleakproof', 'arguments']

                data['change_func'] = False
                for arg in fun_change_args:
                    if arg == 'arguments' and arg in data and len(data[arg]) \
                            > 0:
                        data['change_func'] = True
                    elif arg in data:
                        data['change_func'] = True

                # If Function Definition/Arguments are changed then merge old
                # Arguments with changed ones for Create/Replace Function
                # SQL statement
                if 'arguments' in data and len(data['arguments']) > 0:
                    for arg in data['arguments']['changed']:
                        for old_arg in old_data['arguments']:
                            if arg['argid'] == old_arg['argid']:
                                old_arg.update(arg)
                                break
                    data['arguments'] = old_data['arguments']
                elif data['change_func']:
                    data['arguments'] = old_data['arguments']

                # Parse Privileges
                if 'acl' in data:
                    for key in ['added', 'deleted', 'changed']:
                        if key in data['acl']:
                            data['acl'][key] = parse_priv_to_db(
                                data['acl'][key], ["X"])

                # Parse Variables
                chngd_variables = {}
                data['merged_variables'] = []
                old_data['chngd_variables'] = {}
                del_variables = {}

                # Deleted Variables
                if 'variables' in data and 'deleted' in data['variables']:
                    for v in data['variables']['deleted']:
                        del_variables[v['name']] = v['value']

                # If Function Definition/Arguments are changed then,
                # Merge old, new (added, changed, deleted) variables,
                # which will be used in the CREATE or REPLACE Function sql
                # statement
                if data['change_func']:
                    # To compare old and new variables, preparing name :
                    # value dict
                    if 'variables' in data and 'changed' in data['variables']:
                        for v in data['variables']['changed']:
                            chngd_variables[v['name']] = v['value']

                    if 'variables' in data and 'added' in data['variables']:
                        for v in data['variables']['added']:
                            chngd_variables[v['name']] = v['value']

                    for v in old_data['variables']:
                        old_data['chngd_variables'][v['name']] = v['value']

                    # Prepare final dict of new and old variables
                    for name, val in old_data['chngd_variables'].items():
                        if name not in chngd_variables and name not in \
                                del_variables:
                            chngd_variables[name] = val

                    # Prepare dict in [{'name': var_name, 'value': var_val},..]
                    # format
                    for name, val in chngd_variables.items():
                        data['merged_variables'].append({'name': name,
                                                         'value': val})
                else:
                    if 'variables' in data and 'changed' in data['variables']:
                        for v in data['variables']['changed']:
                            data['merged_variables'].append(v)

                    if 'variables' in data and 'added' in data['variables']:
                        for v in data['variables']['added']:
                            data['merged_variables'].append(v)

                SQL = render_template(
                    "/".join([self.sql_template_path, 'update.sql']),
                    data=data, o_data=old_data
                )
            else:
                # Parse Privileges
                if 'acl' in data:
                    data['acl'] = parse_priv_to_db(data['acl'], ["X"])

                args = u''
                args_without_name = u''
                cnt = 1
                args_list = []
                if 'arguments' in data and len(data['arguments']) > 0:
                    args_list = data['arguments']
                elif 'args' in data and len(data['args']) > 0:
                    args_list = data['args']
                for a in args_list:
                    if (('argmode' in a and a['argmode'] != 'OUT' and
                            a['argmode'] is not None
                         ) or 'argmode' not in a):
                        if 'argmode' in a:
                            args += a['argmode'] + " "
                            args_without_name += a['argmode'] + " "
                        if 'argname' in a and a['argname'] != '' \
                                and a['argname'] is not None:
                            args += self.qtIdent(
                                self.conn, a['argname']) + " "
                        if 'argtype' in a:
                            args += a['argtype']
                            args_without_name += a['argtype']
                        if cnt < len(args_list):
                            args += ', '
                            args_without_name += ', '
                    cnt += 1

                data['func_args'] = args.strip(' ')
                data['func_args_without'] = args_without_name.strip(' ')
                # Create mode
                SQL = render_template("/".join([self.sql_template_path,
                                                'create.sql']),
                                      data=data, is_sql=is_sql)
            return True, SQL.strip('\n')

        except Exception as e:
            return False, e

    def _fetch_properties(self, gid, sid, did, scid, fnid=None):
        """
@@ -1,7 +1,7 @@
SELECT
    pr.oid, pr.proname || '(' || COALESCE(pg_catalog
    .pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
    lanname, pg_get_userbyid(proowner) as funcowner
    lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace as nsp
FROM
    pg_proc pr
JOIN

@@ -11,7 +11,12 @@ LEFT OUTER JOIN
    pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
WHERE
    proisagg = FALSE
{% if fnid %}
    AND pr.oid = {{ fnid|qtLiteral }}
{% endif %}
{% if scid %}
    AND pronamespace = {{scid}}::oid
{% endif %}
    AND typname NOT IN ('trigger', 'event_trigger')
ORDER BY
    proname;

@@ -22,10 +22,11 @@ LEFT OUTER JOIN
    pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
WHERE
    proisagg = FALSE
    AND pronamespace = {{scid}}::oid
    AND typname NOT IN ('trigger', 'event_trigger')
{% if fnid %}
    AND pr.oid = {{fnid}}::oid
{% else %}
    AND pronamespace = {{scid}}::oid
{% endif %}
    AND typname NOT IN ('trigger', 'event_trigger')
ORDER BY
    proname;
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace as nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -11,7 +11,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -22,10 +22,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace as nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -11,7 +11,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -22,10 +22,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace as nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -12,7 +12,12 @@ LEFT OUTER JOIN
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pr.protype = '0'::char
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -22,10 +22,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -12,7 +12,12 @@ LEFT OUTER JOIN
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pr.protype = '0'::char
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -22,10 +22,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -12,7 +12,12 @@ LEFT OUTER JOIN
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pr.protype = '0'::char
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -22,10 +22,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -19,7 +19,12 @@ LEFT OUTER JOIN
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pr.protype = '1'::char
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -29,10 +29,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -19,7 +19,12 @@ LEFT OUTER JOIN
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pr.protype = '1'::char
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -29,10 +29,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -19,7 +19,12 @@ LEFT OUTER JOIN
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pr.protype = '1'::char
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -29,10 +29,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname NOT IN ('trigger', 'event_trigger')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -11,7 +11,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname = 'trigger' AND lanname != 'edbspl'
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -22,10 +22,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname = 'trigger' AND lanname != 'edbspl'
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -11,8 +11,13 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname IN ('trigger', 'event_trigger')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND lanname NOT IN ('edbspl', 'sql', 'internal')
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -22,11 +22,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname IN ('trigger', 'event_trigger')
|
||||
AND lanname NOT IN ('edbspl', 'sql', 'internal')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -11,7 +11,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname IN ('trigger', 'event_trigger')
|
||||
AND lanname NOT IN ('edbspl', 'sql', 'internal')
|
||||
ORDER BY
|
||||
|
@ -22,11 +22,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname IN ('trigger', 'event_trigger')
|
||||
AND lanname NOT IN ('edbspl', 'sql', 'internal')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -11,7 +11,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname = 'trigger' AND lanname != 'edbspl'
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -22,10 +22,11 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname = 'trigger' AND lanname != 'edbspl'
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -11,7 +11,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname IN ('trigger', 'event_trigger')
|
||||
AND lanname NOT IN ('edbspl', 'sql', 'internal')
|
||||
ORDER BY
|
||||
|
@ -22,11 +22,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname IN ('trigger', 'event_trigger')
|
||||
AND lanname NOT IN ('edbspl', 'sql', 'internal')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@ -1,7 +1,7 @@
|
||||
SELECT
|
||||
pr.oid, pr.proname || '(' || COALESCE(pg_catalog
|
||||
.pg_get_function_identity_arguments(pr.oid), '') || ')' as name,
|
||||
lanname, pg_get_userbyid(proowner) as funcowner
|
||||
lanname, pg_get_userbyid(proowner) as funcowner, pr.pronamespace AS nsp
|
||||
FROM
|
||||
pg_proc pr
|
||||
JOIN
|
||||
|
@ -11,7 +11,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
{% if scid %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
AND typname IN ('trigger', 'event_trigger')
|
||||
AND lanname NOT IN ('edbspl', 'sql', 'internal')
|
||||
ORDER BY
|
||||
|
@ -22,11 +22,12 @@ LEFT OUTER JOIN
|
||||
pg_description des ON (des.objoid=pr.oid AND des.classoid='pg_proc'::regclass)
|
||||
WHERE
|
||||
proisagg = FALSE
|
||||
AND pronamespace = {{scid}}::oid
|
||||
AND typname IN ('trigger', 'event_trigger')
|
||||
AND lanname NOT IN ('edbspl', 'sql', 'internal')
|
||||
{% if fnid %}
|
||||
AND pr.oid = {{fnid}}::oid
|
||||
{% else %}
|
||||
AND pronamespace = {{scid}}::oid
|
||||
{% endif %}
|
||||
ORDER BY
|
||||
proname;
|
||||
|
@@ -22,7 +22,7 @@ from pgadmin.browser.server_groups.servers.utils import parse_priv_from_db, \
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, \
    make_response as ajax_response, internal_server_error
from pgadmin.utils.ajax import precondition_required
from pgadmin.utils.ajax import precondition_required, gone
from pgadmin.utils.driver import get_driver

from config import PG_DEFAULT_DRIVER

@@ -98,7 +98,7 @@ class PackageView(PGChildNodeView):
        ],
        'delete': [{'delete': 'delete'}],
        'children': [{'get': 'children'}],
        'nodes': [{'get': 'node'}, {'get': 'nodes'}],
        'nodes': [{'get': 'nodes'}, {'get': 'nodes'}],
        'sql': [{'get': 'sql'}],
        'msql': [{'get': 'msql'}, {'get': 'msql'}],
        'stats': [{'get': 'statistics'}, {'get': 'statistics'}],
@@ -149,9 +149,10 @@ class PackageView(PGChildNodeView):
            if self.manager.version < 90200:
                self.template_path = 'package/ppas/9.1_plus'

            SQL = render_template("/".join([self.template_path,
                                            'get_schema.sql']),
                                  scid=kwargs['scid'])
            SQL = render_template(
                "/".join([self.template_path, 'get_schema.sql']),
                scid=kwargs['scid']
            )
            status, rset = self.conn.execute_scalar(SQL)
            if not status:
                return internal_server_error(errormsg=rset)

@@ -191,7 +192,7 @@ class PackageView(PGChildNodeView):
        )

    @check_precondition(action='nodes')
    def nodes(self, gid, sid, did, scid):
    def nodes(self, gid, sid, did, scid, pkgid=None):
        """
        This function is used to create all the child nodes within the collection.
        Here it will create all the package nodes.
@@ -206,11 +207,31 @@ class PackageView(PGChildNodeView):

        """
        res = []
        SQL = render_template("/".join([self.template_path, 'nodes.sql']), scid=scid)
        SQL = render_template(
            "/".join([self.template_path, 'nodes.sql']),
            scid=scid,
            pkgid=pkgid
        )
        status, rset = self.conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=rset)

        if pkgid is not None:
            if len(rset['rows']) == 0:
                return gone(
                    errormsg=_("Couldn't find the package.")
                )

            row = rset['rows'][0]
            return make_json_response(
                data=self.blueprint.generate_browser_node(
                    row['oid'],
                    sid,
                    row['name'],
                    icon="icon-%s" % self.node_type
                )
            )

        for row in rset['rows']:
            res.append(
                self.blueprint.generate_browser_node(
@ -246,6 +267,11 @@ class PackageView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
errormsg=_("Could not find the package in the database.")
|
||||
)
|
||||
|
||||
res['rows'][0]['pkgheadsrc'] = self.get_inner(res['rows'][0]['pkgheadsrc'])
|
||||
res['rows'][0]['pkgbodysrc'] = self.get_inner(res['rows'][0]['pkgbodysrc'])
|
||||
|
||||
@ -298,39 +324,39 @@ class PackageView(PGChildNodeView):
|
||||
"Could not find the required parameter (%s)." % arg
|
||||
)
|
||||
)
|
||||
try:
|
||||
data['schema'] = self.schema
|
||||
# The SQL below will execute CREATE DDL only
|
||||
SQL = render_template("/".join([self.template_path, 'create.sql']), data=data, conn=self.conn)
|
||||
status, msg = self.conn.execute_scalar(SQL)
|
||||
data['schema'] = self.schema
|
||||
# The SQL below will execute CREATE DDL only
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=data, conn=self.conn
|
||||
)
|
||||
|
||||
status, msg = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=msg)
|
||||
|
||||
# We need oid of newly created package.
|
||||
SQL = render_template(
|
||||
"/".join([
|
||||
self.template_path, 'get_oid.sql'
|
||||
]),
|
||||
name=data['name'], scid=scid
|
||||
)
|
||||
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
if SQL and SQL != "":
|
||||
status, pkgid = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=msg)
|
||||
return internal_server_error(errormsg=pkgid)
|
||||
|
||||
# We need oid of newly created package.
|
||||
SQL = render_template("/".join([self.template_path, 'get_oid.sql']),
|
||||
name=data['name'], scid=scid)
|
||||
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
if SQL and SQL != "":
|
||||
status, pkgid = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=pkgid)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
pkgid,
|
||||
scid,
|
||||
data['name'],
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return make_json_response(
|
||||
status=500,
|
||||
success=0,
|
||||
errormsg=str(e)
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
pkgid,
|
||||
scid,
|
||||
data['name'],
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
@check_precondition(action='delete')
|
||||
def delete(self, gid, sid, did, scid, pkgid):
|
||||
@ -414,40 +440,21 @@ class PackageView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
try:
|
||||
SQL = self.getSQL(gid, sid, did, data, scid, pkgid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
if SQL != "":
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Package updated",
|
||||
data={
|
||||
'id': pkgid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': pkgid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
SQL, name = self.getSQL(gid, sid, did, data, scid, pkgid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
pkgid,
|
||||
scid,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
@check_precondition(action='msql')
|
||||
def msql(self, gid, sid, did, scid, pkgid=None):
|
||||
@ -484,18 +491,15 @@ class PackageView(PGChildNodeView):
|
||||
"Could not find the required parameter (%s)." % arg
|
||||
)
|
||||
)
|
||||
try:
|
||||
SQL = self.getSQL(gid, sid, did, data, scid, pkgid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
return make_json_response(
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
except Exception as e:
|
||||
return make_json_response(
|
||||
data="-- modified SQL",
|
||||
status=200
|
||||
)
|
||||
|
||||
SQL, name = self.getSQL(gid, sid, did, data, scid, pkgid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
return make_json_response(
|
||||
data=SQL,
|
||||
status=200
|
||||
)
|
||||
|
||||
def getSQL(self, gid, sid, did, data, scid, pkgid=None):
|
||||
"""
|
||||
@ -558,6 +562,7 @@ class PackageView(PGChildNodeView):
|
||||
|
||||
SQL = render_template("/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=self.conn)
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
# To format privileges coming from client
|
||||
if 'pkgacl' in data:
|
||||
@ -565,7 +570,7 @@ class PackageView(PGChildNodeView):
|
||||
|
||||
SQL = render_template("/".join([self.template_path, 'create.sql']), data=data, conn=self.conn)
|
||||
|
||||
return SQL
|
||||
return SQL, data['name']
|
||||
|
||||
@check_precondition(action="sql")
|
||||
def sql(self, gid, sid, did, scid, pkgid):
|
||||
@ -579,45 +584,48 @@ class PackageView(PGChildNodeView):
|
||||
scid: Schema ID
|
||||
pkgid: Package ID
|
||||
"""
|
||||
try:
|
||||
SQL = render_template("/".join([self.template_path, 'properties.sql']), scid=scid, pkgid=pkgid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
SQL = render_template("/".join([self.template_path, 'properties.sql']), scid=scid, pkgid=pkgid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
res['rows'][0]['pkgheadsrc'] = self.get_inner(res['rows'][0]['pkgheadsrc'])
|
||||
res['rows'][0]['pkgbodysrc'] = self.get_inner(res['rows'][0]['pkgbodysrc'])
|
||||
|
||||
res['rows'][0]['pkgheadsrc'] = self.get_inner(res['rows'][0]['pkgheadsrc'])
|
||||
res['rows'][0]['pkgbodysrc'] = self.get_inner(res['rows'][0]['pkgbodysrc'])
|
||||
SQL = render_template("/".join([self.template_path, 'acl.sql']),
|
||||
scid=scid,
|
||||
pkgid=pkgid)
|
||||
status, rset1 = self.conn.execute_dict(SQL)
|
||||
|
||||
SQL = render_template("/".join([self.template_path, 'acl.sql']),
|
||||
scid=scid,
|
||||
pkgid=pkgid)
|
||||
status, rset1 = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset1)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset1)
|
||||
for row in rset1['rows']:
|
||||
priv = parse_priv_from_db(row)
|
||||
res['rows'][0].setdefault(row['deftype'], []).append(priv)
|
||||
|
||||
for row in rset1['rows']:
|
||||
priv = parse_priv_from_db(row)
|
||||
res['rows'][0].setdefault(row['deftype'], []).append(priv)
|
||||
result = res['rows'][0]
|
||||
sql, name = self.getSQL(gid, sid, did, result, scid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
|
||||
result = res['rows'][0]
|
||||
sql = self.getSQL(gid, sid, did, result, scid)
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
sql_header = "-- Package: {}\n\n-- ".format(
|
||||
self.qtIdent(self.conn, self.schema, result['name'])
|
||||
)
|
||||
if hasattr(str, 'decode'):
|
||||
sql_header = sql_header.decode('utf-8')
|
||||
|
||||
sql_header = "-- Package: {}\n\n-- ".format(self.qtIdent(self.conn,
|
||||
self.schema,
|
||||
result['name']))
|
||||
if hasattr(str, 'decode'):
|
||||
sql_header = sql_header.decode('utf-8')
|
||||
sql_header += render_template(
|
||||
"/".join([self.template_path, 'delete.sql']),
|
||||
data=result)
|
||||
sql_header += "\n\n"
|
||||
|
||||
sql_header += render_template(
|
||||
"/".join([self.template_path, 'delete.sql']),
|
||||
data=result)
|
||||
sql_header += "\n\n"
|
||||
sql = sql_header + sql
|
||||
|
||||
sql = sql_header + sql
|
||||
return ajax_response(response=sql)
|
||||
|
||||
return ajax_response(response=sql)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition(action="dependents")
|
||||
def dependents(self, gid, sid, did, scid, pkgid):
|
||||
|
@ -157,7 +157,7 @@ class EdbFuncView(PGChildNodeView, DataTypeReader):
|
||||
{'get': 'properties'},
|
||||
{'get': 'list'}
|
||||
],
|
||||
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
|
||||
'nodes': [{'get': 'nodes'}, {'get': 'nodes'}],
|
||||
'sql': [{'get': 'sql'}],
|
||||
'dependency': [{'get': 'dependencies'}],
|
||||
'dependent': [{'get': 'dependents'}],
|
||||
@ -246,7 +246,7 @@ class EdbFuncView(PGChildNodeView, DataTypeReader):
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def nodes(self, gid, sid, did, scid, pkgid):
|
||||
def nodes(self, gid, sid, did, scid, pkgid, edbfnid=None):
|
||||
"""
|
||||
Returns all the Functions to generate the Nodes.
|
||||
|
||||
@ -258,13 +258,32 @@ class EdbFuncView(PGChildNodeView, DataTypeReader):
|
||||
"""
|
||||
|
||||
res = []
|
||||
SQL = render_template("/".join([self.sql_template_path,
|
||||
'node.sql']), pkgid=pkgid)
|
||||
SQL = render_template(
|
||||
"/".join([self.sql_template_path, 'node.sql']),
|
||||
pkgid=pkgid,
|
||||
fnid=edbfnid
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
if edbfnid is not None:
|
||||
if len(rset['rows']) == 0:
|
||||
return gone(
|
||||
errormsg=_("Couldn't find the function")
|
||||
)
|
||||
row = rset['rows'][0]
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
pkgid,
|
||||
row['name'],
|
||||
icon="icon-" + self.node_type
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
for row in rset['rows']:
|
||||
res.append(
|
||||
self.blueprint.generate_browser_node(
|
||||
|
@ -2,5 +2,9 @@ SELECT pg_proc.oid,
|
||||
pg_proc.proname || '(' || COALESCE(pg_catalog.pg_get_function_identity_arguments(pg_proc.oid), '') || ')' AS name
|
||||
FROM pg_proc, pg_namespace
|
||||
WHERE format_type(prorettype, NULL) != 'void'
|
||||
AND pronamespace = {{pkgid}}::oid
|
||||
AND pg_proc.pronamespace = pg_namespace.oid
|
||||
{% if fnid %}
|
||||
AND pg_proc.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
AND pronamespace = {{ pkgid|qtLiteral }}::oid
|
||||
AND pg_proc.pronamespace = pg_namespace.oid
|
||||
ORDER BY pg_proc.proname
|
||||
|
@ -2,5 +2,8 @@ SELECT pg_proc.oid,
|
||||
pg_proc.proname || '(' || COALESCE(pg_catalog.pg_get_function_identity_arguments(pg_proc.oid), '') || ')' AS name
|
||||
FROM pg_proc, pg_namespace
|
||||
WHERE format_type(prorettype, NULL) != 'void'
|
||||
AND pronamespace = {{pkgid}}::oid
|
||||
AND pg_proc.pronamespace = pg_namespace.oid
|
||||
{% if fnid %}
|
||||
AND pg_proc.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
AND pronamespace = {{pkgid|qtLiteral}}::oid
|
||||
AND pg_proc.pronamespace = pg_namespace.oid
|
||||
|
@ -2,5 +2,8 @@ SELECT pg_proc.oid,
|
||||
pg_proc.proname || '(' || COALESCE(pg_catalog.pg_get_function_identity_arguments(pg_proc.oid), '') || ')' AS name
|
||||
FROM pg_proc, pg_namespace
|
||||
WHERE format_type(prorettype, NULL) = 'void'
|
||||
AND pronamespace = {{pkgid}}::oid
|
||||
AND pg_proc.pronamespace = pg_namespace.oid
|
||||
{% if fnid %}
|
||||
AND pg_proc.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
AND pronamespace = {{pkgid|qtLiteral}}::oid
|
||||
AND pg_proc.pronamespace = pg_namespace.oid
|
||||
|
@ -2,5 +2,8 @@ SELECT pg_proc.oid,
|
||||
pg_proc.proname || '(' || COALESCE(pg_catalog.pg_get_function_identity_arguments(pg_proc.oid), '') || ')' AS name
|
||||
FROM pg_proc, pg_namespace
|
||||
WHERE format_type(prorettype, NULL) = 'void'
|
||||
AND pronamespace = {{pkgid}}::oid
|
||||
AND pg_proc.pronamespace = pg_namespace.oid
|
||||
{% if fnid %}
|
||||
AND pg_proc.oid = {{ fnid|qtLiteral }}
|
||||
{% endif %}
|
||||
AND pronamespace = {{pkgid|qtLiteral}}::oid
|
||||
AND pg_proc.pronamespace = pg_namespace.oid
|
||||
|
@ -150,7 +150,7 @@ class EdbVarView(PGChildNodeView, DataTypeReader):
|
||||
{'get': 'properties'},
|
||||
{'get': 'list'}
|
||||
],
|
||||
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
|
||||
'nodes': [{'get': 'nodes'}, {'get': 'nodes'}],
|
||||
'sql': [{'get': 'sql'}],
|
||||
'module.js': [{}, {}, {'get': 'module_js'}]
|
||||
})
|
||||
@ -238,8 +238,10 @@ class EdbVarView(PGChildNodeView, DataTypeReader):
|
||||
"""
|
||||
|
||||
res = []
|
||||
SQL = render_template("/".join([self.sql_template_path,
|
||||
'node.sql']), pkgid=pkgid)
|
||||
SQL = render_template(
|
||||
"/".join([self.sql_template_path, 'node.sql']),
|
||||
pkgid=pkgid
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
|
||||
if not status:
|
||||
|
@ -2,4 +2,7 @@ SELECT oid,
|
||||
varname AS name
|
||||
FROM edb_variable
|
||||
WHERE varpackage = {{pkgid}}::oid
|
||||
{% if varid %}
|
||||
AND oid = {{ varid|qtLiteral }}
|
||||
{% endif %}
|
||||
ORDER BY varname
|
||||
|
@ -25,6 +25,7 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
from pgadmin.utils.driver import get_driver
|
||||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils.ajax import gone
|
||||
|
||||
|
||||
class SequenceModule(SchemaChildModule):
|
||||
@ -105,7 +106,7 @@ class SequenceView(PGChildNodeView):
|
||||
],
|
||||
'delete': [{'delete': 'delete'}],
|
||||
'children': [{'get': 'children'}],
|
||||
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
|
||||
'nodes': [{'get': 'nodes'}, {'get': 'nodes'}],
|
||||
'sql': [{'get': 'sql'}],
|
||||
'msql': [{'get': 'msql'}, {'get': 'msql'}],
|
||||
'stats': [{'get': 'statistics'}, {'get': 'statistics'}],
|
||||
@ -176,7 +177,7 @@ class SequenceView(PGChildNodeView):
|
||||
)
|
||||
|
||||
@check_precondition(action='nodes')
|
||||
def nodes(self, gid, sid, did, scid):
|
||||
def nodes(self, gid, sid, did, scid, seid=None):
|
||||
"""
|
||||
This function is used to create all the child nodes within the collection.
|
||||
Here it will create all the sequence nodes.
|
||||
@ -191,11 +192,32 @@ class SequenceView(PGChildNodeView):
|
||||
|
||||
"""
|
||||
res = []
|
||||
SQL = render_template("/".join([self.template_path, 'nodes.sql']), scid=scid)
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'nodes.sql']),
|
||||
scid=scid,
|
||||
seid=seid
|
||||
)
|
||||
status, rset = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
if seid is not None:
|
||||
if len(rset['rows']) == 0:
|
||||
return gone(
|
||||
errormsg=_("Couldn't find the sequence.")
|
||||
)
|
||||
row = rset['rows'][0]
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
sid,
|
||||
row['name'],
|
||||
icon="icon-%s" % self.node_type
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
|
||||
for row in rset['rows']:
|
||||
res.append(
|
||||
self.blueprint.generate_browser_node(
|
||||
@ -231,6 +253,9 @@ class SequenceView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""could not find the sequence in the database."""))
|
||||
|
||||
for row in res['rows']:
|
||||
SQL = render_template("/".join([self.template_path, 'get_def.sql']), data=row)
|
||||
status, rset1 = self.conn.execute_dict(SQL)
|
||||
@ -305,47 +330,47 @@ class SequenceView(PGChildNodeView):
|
||||
"Could not find the required parameter (%s)." % arg
|
||||
)
|
||||
)
|
||||
try:
|
||||
# The SQL below will execute CREATE DDL only
|
||||
SQL = render_template("/".join([self.template_path, 'create.sql']), data=data, conn=self.conn)
|
||||
# The SQL below will execute CREATE DDL only
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=data, conn=self.conn
|
||||
)
|
||||
status, msg = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=msg)
|
||||
|
||||
if 'relacl' in data:
|
||||
data['relacl'] = parse_priv_to_db(data['relacl'], 'DATABASE')
|
||||
|
||||
# The SQL below will execute rest DMLs because we can not execute CREATE with any other
|
||||
SQL = render_template("/".join([self.template_path, 'grant.sql']), data=data, conn=self.conn)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
if SQL and SQL != "":
|
||||
status, msg = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=msg)
|
||||
|
||||
if 'relacl' in data:
|
||||
data['relacl'] = parse_priv_to_db(data['relacl'], 'DATABASE')
|
||||
# We need oid of newly created sequence.
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'get_oid.sql']),
|
||||
name=data['name'],
|
||||
schema=data['schema']
|
||||
)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
|
||||
# The SQL below will execute rest DMLs because we can not execute CREATE with any other
|
||||
SQL = render_template("/".join([self.template_path, 'grant.sql']), data=data, conn=self.conn)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
if SQL and SQL != "":
|
||||
status, msg = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=msg)
|
||||
status, rset= self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
# We need oid of newly created sequence.
|
||||
SQL = render_template("/".join([self.template_path, 'get_oid.sql']), name=data['name'], scid=scid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
|
||||
status, seid = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=seid)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
seid,
|
||||
scid,
|
||||
data['name'],
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return make_json_response(
|
||||
status=500,
|
||||
success=0,
|
||||
errormsg=str(e)
|
||||
row=rset['rows'][0]
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
row['relnamespace'],
|
||||
data['name'],
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
@check_precondition(action='delete')
|
||||
def delete(self, gid, sid, did, scid, seid):
|
||||
@ -424,40 +449,30 @@ class SequenceView(PGChildNodeView):
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
try:
|
||||
SQL = self.getSQL(gid, sid, did, data, scid, seid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
if SQL != "":
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
SQL, name = self.getSQL(gid, sid, did, data, scid, seid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Sequence updated",
|
||||
data={
|
||||
'id': seid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
else:
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info="Nothing to update",
|
||||
data={
|
||||
'id': seid,
|
||||
'scid': scid,
|
||||
'sid': sid,
|
||||
'gid': gid,
|
||||
'did': did
|
||||
}
|
||||
)
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'nodes.sql']),
|
||||
seid=seid
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
row = rset['rows'][0]
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
seid,
|
||||
row['schema'],
|
||||
row['name'],
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
|
||||
@check_precondition(action='msql')
|
||||
def msql(self, gid, sid, did, scid, seid=None):
|
||||
@ -494,8 +509,10 @@ class SequenceView(PGChildNodeView):
|
||||
"Could not find the required parameter (%s)." % arg
|
||||
)
|
||||
)
|
||||
SQL = self.getSQL(gid, sid, did, data, scid, seid)
|
||||
SQL, name = self.getSQL(gid, sid, did, data, scid, seid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
if SQL == '':
|
||||
SQL = "--modified SQL"
|
||||
|
||||
return make_json_response(
|
||||
data=SQL,
|
||||
@ -544,6 +561,7 @@ class SequenceView(PGChildNodeView):
|
||||
data[arg] = old_data[arg]
|
||||
SQL = render_template("/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=self.conn)
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
else:
|
||||
# To format privileges coming from client
|
||||
if 'relacl' in data:
|
||||
@ -551,7 +569,7 @@ class SequenceView(PGChildNodeView):
|
||||
|
||||
SQL = render_template("/".join([self.template_path, 'create.sql']), data=data, conn=self.conn)
|
||||
SQL += render_template("/".join([self.template_path, 'grant.sql']), data=data, conn=self.conn)
|
||||
return SQL
|
||||
return SQL, data['name']
|
||||
|
||||
@check_precondition(action="sql")
|
||||
def sql(self, gid, sid, did, scid, seid):
|
||||
@ -586,7 +604,7 @@ class SequenceView(PGChildNodeView):
|
||||
|
||||
result = res['rows'][0]
|
||||
result = self._formatter(result, scid, seid)
|
||||
SQL = self.getSQL(gid, sid, did, result, scid)
|
||||
SQL, name = self.getSQL(gid, sid, did, result, scid)
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
return ajax_response(response=SQL)
|
||||
|
||||
|
@ -1,5 +1,7 @@
|
||||
SELECT cl.oid as oid
|
||||
SELECT cl.oid as oid, relnamespace
|
||||
FROM pg_class cl
|
||||
LEFT OUTER JOIN pg_description des ON (des.objoid=cl.oid AND des.classoid='pg_class'::regclass)
|
||||
WHERE relkind = 'S' AND relnamespace = {{scid}}::oid
|
||||
AND relname = {{ name|qtLiteral }}
|
||||
LEFT OUTER JOIN pg_namespace nsp ON (nsp.oid = cl.relnamespace)
|
||||
WHERE relkind = 'S'
|
||||
AND relname = {{ name|qtLiteral }}
|
||||
AND nspname = {{ schema|qtLiteral }}
|
||||
|
@ -1,4 +1,11 @@
|
||||
SELECT cl.oid as oid, relname as name
|
||||
SELECT cl.oid as oid, relname as name, relnamespace as schema
|
||||
FROM pg_class cl
|
||||
WHERE relkind = 'S' AND relnamespace = {{scid}}::oid
|
||||
ORDER BY relname
|
||||
WHERE
|
||||
relkind = 'S'
|
||||
{% if scid %}
|
||||
AND relnamespace = {{scid|qtLiteral}}::oid
|
||||
{% endif %}
|
||||
{% if seid %}
|
||||
AND cl.oid = {{seid|qtLiteral}}::oid
|
||||
{% endif %}
|
||||
ORDER BY relname
|
||||
|
Some files were not shown because too many files have changed in this diff.