2015-03-10 08:09:11 -05:00
|
|
|
##########################################################################
|
|
|
|
#
|
|
|
|
# pgAdmin 4 - PostgreSQL Tools
|
|
|
|
#
|
2021-01-04 04:04:45 -06:00
|
|
|
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
2015-03-10 08:09:11 -05:00
|
|
|
# This software is released under the PostgreSQL Licence
|
|
|
|
#
|
|
|
|
##########################################################################
|
|
|
|
|
|
|
|
"""Browser helper utilities"""
|
|
|
|
|
2016-03-07 05:48:24 -06:00
|
|
|
from abc import abstractmethod
|
2016-06-21 08:12:14 -05:00
|
|
|
|
2015-06-30 00:51:55 -05:00
|
|
|
import flask
|
2016-02-22 07:07:16 -06:00
|
|
|
from flask import render_template, current_app
|
2016-06-21 08:12:14 -05:00
|
|
|
from flask.views import View, MethodViewType, with_metaclass
|
2018-04-04 04:47:01 -05:00
|
|
|
from flask_babelex import gettext
|
2015-06-30 00:51:55 -05:00
|
|
|
|
2016-06-21 08:12:14 -05:00
|
|
|
from config import PG_DEFAULT_DRIVER
|
2019-11-25 21:34:41 -06:00
|
|
|
from pgadmin.utils.ajax import make_json_response, precondition_required,\
|
|
|
|
internal_server_error
|
2019-05-28 01:30:18 -05:00
|
|
|
from pgadmin.utils.exception import ConnectionLost, SSHTunnelConnectionLost,\
|
|
|
|
CryptKeyMissing
|
2016-06-21 08:12:14 -05:00
|
|
|
|
2018-02-26 09:58:48 -06:00
|
|
|
|
2019-06-21 03:53:57 -05:00
|
|
|
def underscore_escape(text):
    """
    This function mimics the behaviour of the underscore js escape function.

    The html escaped by jinja is not compatible with the underscore unescape
    function, so the replacement is done manually here.

    :param text: input html text
    :return: escaped text
    """
    # Map of literal characters to their HTML entity. The '&' entry must be
    # applied first, otherwise the '&' introduced by the other replacements
    # would itself be escaped a second time.
    html_map = {
        '&': "&amp;",
        '<': "&lt;",
        '>': "&gt;",
        '"': "&quot;",
        '`': "&#96;",
        "'": "&#x27;"
    }

    # always replace & first
    for c, r in sorted(html_map.items(),
                       key=lambda x: 0 if x[0] == '&' else 1):
        text = text.replace(c, r)

    return text
|
|
|
|
|
|
|
|
|
2019-08-23 06:14:20 -05:00
|
|
|
def underscore_unescape(text):
    """
    This function mimics the behaviour of the underscore js unescape function.

    The html unescape done by jinja is not compatible with the underscore
    escape function, so the replacement is done manually here.

    :param text: input html text
    :return: unescaped text
    """
    # Map of HTML entities back to their literal character. Both the hex
    # (&#x27;) and decimal (&#39;/&#34;) forms of quote entities are handled.
    html_map = {
        "&amp;": '&',
        "&lt;": '<',
        "&gt;": '>',
        "&quot;": '"',
        "&#96;": '`',
        "&#x27;": "'",
        "&#39;": "'",
        "&#34;": '"'
    }

    # always replace & first
    for c, r in html_map.items():
        text = text.replace(c, r)

    return text
|
|
|
|
|
|
|
|
|
2017-08-30 05:18:14 -05:00
|
|
|
def is_version_in_range(sversion, min_ver, max_ver):
    """
    Check whether a server version number falls inside an (inclusive) range.

    Either bound may be None, meaning that side of the range is unbounded.

    :param sversion: server version number to test
    :param min_ver: inclusive lower bound, or None for no lower bound
    :param max_ver: inclusive upper bound, or None for no upper bound
    :return: True when sversion is within the range, False otherwise
    """
    assert (max_ver is None or isinstance(max_ver, int))
    assert (min_ver is None or isinstance(min_ver, int))

    # A missing bound is treated as automatically satisfied.
    meets_lower_bound = min_ver is None or min_ver <= sversion
    meets_upper_bound = max_ver is None or max_ver >= sversion

    return meets_lower_bound and meets_upper_bound
|
2015-10-20 02:03:18 -05:00
|
|
|
|
2018-02-26 09:58:48 -06:00
|
|
|
|
2016-02-18 05:32:52 -06:00
|
|
|
class PGChildModule(object):
    """
    class PGChildModule

    This is a base class for children/grand-children of PostgreSQL, and
    all EDB Postgres Advanced Server versions
    (i.e. EDB Postgres Advanced Server, Green Plum, etc).

    Method:
    ------
    * backend_supported(manager)
      - Return True when it supports certain version.
        Uses the psycopg2 server connection manager as input for checking the
        compatibility of the current module.
    """

    def __init__(self, *args, **kwargs):
        # Supported PostgreSQL version range (inclusive bounds).
        self.min_ver = 0
        self.max_ver = 1100000000
        # Supported EDB Postgres Advanced Server (ppas) version range.
        self.min_ppasver = 0
        self.max_ppasver = 1100000000
        # Restrict the module to certain server types when set to a list;
        # None means "any server type".
        self.server_type = None
        # Supported Greenplum (gpdb) version range.
        self.min_gpdbver = 80323
        self.max_gpdbver = 1000000000

        super(PGChildModule, self).__init__()

    def backend_supported(self, manager, **kwargs):
        """
        Return True when the connected server (described by the connection
        manager) is supported by this module, based on server type and
        version range.
        """
        # Modules can be hidden explicitly via a show_node flag.
        if hasattr(self, 'show_node') and not self.show_node:
            return False

        sversion = getattr(manager, 'sversion', None)

        # Without a usable integer server version we cannot decide support.
        if sversion is None or not isinstance(sversion, int):
            return False

        assert (self.server_type is None or isinstance(self.server_type, list))

        if self.server_type is None or manager.server_type in self.server_type:
            # Pick the version range matching the server flavour.
            min_server_version = self.min_ver
            max_server_version = self.max_ver
            if manager.server_type == 'ppas':
                min_server_version = self.min_ppasver
                max_server_version = self.max_ppasver
            if manager.server_type == 'gpdb':
                min_server_version = self.min_gpdbver
                max_server_version = self.max_gpdbver
            return is_version_in_range(sversion, min_server_version,
                                       max_server_version)

        return False

    @abstractmethod
    def get_nodes(self, sid=None, **kwargs):
        """Return the browser-tree nodes for this module (subclass hook)."""
        pass
|
|
|
|
|
2016-03-07 05:48:24 -06:00
|
|
|
|
2015-06-29 03:11:56 -05:00
|
|
|
class NodeView(with_metaclass(MethodViewType, View)):
    """
    A PostgreSQL Object has so many operaions/functions apart from CRUD
    (Create, Read, Update, Delete):
    i.e.
    - Reversed Engineered SQL
    - Modified Query for parameter while editing object attributes
      i.e. ALTER TABLE ...
    - Statistics of the objects
    - List of dependents
    - List of dependencies
    - Listing of the children object types for the certain node
      It will used by the browser tree to get the children nodes

    This class can be inherited to achieve the diffrent routes for each of the
    object types/collections.

       OPERATION   |             URL             | HTTP Method |    Method
    ---------------+-----------------------------+-------------+--------------
    List           | /obj/[Parent URL]/          | GET         | list
    Properties     | /obj/[Parent URL]/id        | GET         | properties
    Create         | /obj/[Parent URL]/          | POST        | create
    Delete         | /obj/[Parent URL]/id        | DELETE      | delete
    Update         | /obj/[Parent URL]/id        | PUT         | update

    SQL (Reversed  | /sql/[Parent URL]/id        | GET         | sql
    Engineering)   |
    SQL (Modified  | /msql/[Parent URL]/id       | GET         | modified_sql
    Properties)    |

    Statistics     | /stats/[Parent URL]/id      | GET         | statistics
    Dependencies   | /dependency/[Parent URL]/id | GET         | dependencies
    Dependents     | /dependent/[Parent URL]/id  | GET         | dependents

    Nodes          | /nodes/[Parent URL]/        | GET         | nodes
    Current Node   | /nodes/[Parent URL]/id      | GET         | node

    Children       | /children/[Parent URL]/id   | GET         | children

    NOTE:
    Parent URL can be seen as the path to identify the particular node.

    i.e.
    In order to identify the TABLE object, we need server -> database -> schema
    information.
    """
    # Maps each command name to a list of {http-verb: handler-method-name}
    # dicts. Index 0 applies to URLs carrying the object id, index 1 to URLs
    # without it, and index 2 (when present) to id-less, argument-less URLs.
    operations = dict({
        'obj': [
            {'get': 'properties', 'delete': 'delete', 'put': 'update'},
            {'get': 'list', 'post': 'create'}
        ],
        'nodes': [{'get': 'node'}, {'get': 'nodes'}],
        'sql': [{'get': 'sql'}],
        'msql': [{'get': 'modified_sql'}],
        'stats': [{'get': 'statistics'}],
        'dependency': [{'get': 'dependencies'}],
        'dependent': [{'get': 'dependents'}],
        'children': [{'get': 'children'}]
    })

    @classmethod
    def generate_ops(cls):
        """
        Flatten the `operations` table into a list of route descriptors:
        {'cmd': ..., 'req': <id required>, 'with_id': ..., 'methods': [...]}.
        """
        cmds = []
        for op in cls.operations:
            idx = 0
            for ops in cls.operations[op]:
                meths = []
                for meth in ops:
                    meths.append(meth.upper())
                if len(meths) > 0:
                    cmds.append({
                        # idx 0 is the id-carrying variant; idx 2 (if any)
                        # is registered without an id in the URL.
                        'cmd': op, 'req': (idx == 0),
                        'with_id': (idx != 2), 'methods': meths
                    })
                idx += 1
        return cmds

    # Inherited class needs to modify these parameters
    node_type = None
    # Inherited class needs to modify these parameters
    node_label = None
    # This must be an array object with attributes (type and id)
    parent_ids = []
    # This must be an array object with attributes (type and id)
    ids = []

    @classmethod
    def get_node_urls(cls):
        """
        Build the URL suffixes for this node type: the id-carrying URL and
        the common (parent-only) URL, from `parent_ids` and `ids`.
        """
        # NOTE(review): `cls.__class__.__name__` on a classmethod names the
        # metaclass, not the view class — confirm the intended message.
        assert cls.node_type is not None, \
            "Please set the node_type for this class ({0})".format(
                str(cls.__class__.__name__))
        common_url = '/'
        for p in cls.parent_ids:
            common_url += '<{0}:{1}>/'.format(str(p['type']), str(p['id']))

        id_url = None
        for p in cls.ids:
            id_url = '{0}<{1}:{2}>'.format(
                common_url if not id_url else id_url,
                p['type'], p['id'])

        return id_url, common_url

    def __init__(self, **kwargs):
        # The command this view instance was registered for (see
        # register_node_view); selects the handler row in `operations`.
        self.cmd = kwargs['cmd']

    # Check the existance of all the required arguments from parent_ids
    # and return combination of has parent arguments, and has id arguments
    def check_args(self, **kwargs):
        has_id = has_args = True
        for p in self.parent_ids:
            if p['id'] not in kwargs:
                has_args = False
                break

        for p in self.ids:
            if p['id'] not in kwargs:
                has_id = False
                break

        return has_args, has_id and has_args

    def dispatch_request(self, *args, **kwargs):
        """
        Route the current HTTP request to the handler method named in the
        `operations` table for this view's command.
        """
        http_method = flask.request.method.lower()
        # HEAD is served by the same handler as GET.
        if http_method == 'head':
            http_method = 'get'

        assert self.cmd in self.operations, \
            'Unimplemented command ({0}) for {1}'.format(
                self.cmd,
                str(self.__class__.__name__)
            )

        has_args, has_id = self.check_args(**kwargs)

        # Verify that the (command, id-presence, verb) combination is
        # declared before dereferencing the handler name below.
        assert (
            self.cmd in self.operations and
            (has_id and len(self.operations[self.cmd]) > 0 and
             http_method in self.operations[self.cmd][0]) or
            (not has_id and len(self.operations[self.cmd]) > 1 and
             http_method in self.operations[self.cmd][1]) or
            (len(self.operations[self.cmd]) > 2 and
             http_method in self.operations[self.cmd][2])
        ), \
            'Unimplemented method ({0}) for command ({1}), which {2} ' \
            'an id'.format(http_method,
                           self.cmd,
                           'requires' if has_id else 'does not require')
        meth = None
        if has_id:
            meth = self.operations[self.cmd][0][http_method]
        elif has_args and http_method in self.operations[self.cmd][1]:
            meth = self.operations[self.cmd][1][http_method]
        else:
            meth = self.operations[self.cmd][2][http_method]

        method = getattr(self, meth, None)

        if method is None:
            # Declared in `operations` but not implemented on the subclass.
            return make_json_response(
                status=406,
                success=0,
                errormsg=gettext(
                    'Unimplemented method ({0}) for this url ({1})').format(
                    meth, flask.request.path
                )
            )

        return method(*args, **kwargs)

    @classmethod
    def register_node_view(cls, blueprint):
        """
        Register one URL rule per descriptor from generate_ops() on the
        given blueprint, binding this view class to each route.
        """
        cls.blueprint = blueprint
        id_url, url = cls.get_node_urls()

        commands = cls.generate_ops()

        for c in commands:
            # Flask endpoint names cannot contain dots.
            cmd = c['cmd'].replace('.', '-')
            if c['with_id']:
                blueprint.add_url_rule(
                    '/{0}{1}'.format(
                        c['cmd'], id_url if c['req'] else url
                    ),
                    view_func=cls.as_view(
                        '{0}{1}'.format(
                            cmd, '_id' if c['req'] else ''
                        ),
                        cmd=c['cmd']
                    ),
                    methods=c['methods']
                )
            else:
                blueprint.add_url_rule(
                    '/{0}'.format(c['cmd']),
                    view_func=cls.as_view(
                        cmd, cmd=c['cmd']
                    ),
                    methods=c['methods']
                )

    def children(self, *args, **kwargs):
        """Build a list of treeview nodes from the child nodes."""
        children = self.get_children_nodes(*args, **kwargs)

        # Return sorted nodes based on label
        return make_json_response(
            data=sorted(
                children, key=lambda c: c['label']
            )
        )

    def get_children_nodes(self, *args, **kwargs):
        """
        Returns the list of children nodes for the current nodes. Override this
        function for special cases only.

        :param args:
        :param kwargs: Parameters to generate the correct set of tree node.
        :return: List of the children nodes
        """
        children = []

        for module in self.blueprint.submodules:
            children.extend(module.get_nodes(*args, **kwargs))

        return children
|
|
|
|
|
2015-10-20 02:03:18 -05:00
|
|
|
|
2015-12-25 23:59:08 -06:00
|
|
|
class PGChildNodeView(NodeView):
    """
    Base view for nodes living under a PostgreSQL server connection. Adds
    connection handling to `children` and dependency/dependent lookups.
    """

    # Common template file names used by the node implementations.
    _NODE_SQL = 'node.sql'
    _NODES_SQL = 'nodes.sql'
    _CREATE_SQL = 'create.sql'
    _UPDATE_SQL = 'update.sql'
    _ALTER_SQL = 'alter.sql'
    _PROPERTIES_SQL = 'properties.sql'
    _DELETE_SQL = 'delete.sql'
    _GRANT_SQL = 'grant.sql'
    _SCHEMA_SQL = 'schema.sql'
    _ACL_SQL = 'acl.sql'
    _OID_SQL = 'get_oid.sql'
    _FUNCTIONS_SQL = 'functions.sql'
    _GET_CONSTRAINTS_SQL = 'get_constraints.sql'
    _GET_TABLES_SQL = 'get_tables.sql'
    _GET_DEFINITION_SQL = 'get_definition.sql'
    _GET_SCHEMA_OID_SQL = 'get_schema_oid.sql'
    _GET_COLUMNS_SQL = 'get_columns.sql'
    _GET_COLUMNS_FOR_TABLE_SQL = 'get_columns_for_table.sql'
    _GET_SUBTYPES_SQL = 'get_subtypes.sql'
    _GET_EXTERNAL_FUNCTIONS_SQL = 'get_external_functions.sql'
    _GET_TABLE_FOR_PUBLICATION = 'get_tables.sql'

    def get_children_nodes(self, manager, **kwargs):
        """
        Returns the list of children nodes for the current nodes.

        :param manager: Server Manager object
        :param kwargs: Parameters to generate the correct set of browser tree
        node
        :return:
        """
        nodes = []
        for module in self.blueprint.submodules:
            if isinstance(module, PGChildModule):
                # PG-aware modules are filtered by server type/version.
                if (
                    manager is not None and
                    module.backend_supported(manager, **kwargs)
                ):
                    nodes.extend(module.get_nodes(**kwargs))
            else:
                # Non-PG modules are always included.
                nodes.extend(module.get_nodes(**kwargs))
        return nodes

    def children(self, **kwargs):
        """Build a list of treeview nodes from the child nodes."""

        if 'sid' not in kwargs:
            return precondition_required(
                gettext('Required properties are missing.')
            )

        # Imported here to avoid a circular import at module load time.
        from pgadmin.utils.driver import get_driver
        manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
            sid=kwargs['sid']
        )

        did = None
        if 'did' in kwargs:
            did = kwargs['did']

        try:
            conn = manager.connection(did=did)
            if not conn.connected():
                status, msg = conn.connect()
                if not status:
                    return internal_server_error(errormsg=msg)
        except (ConnectionLost, SSHTunnelConnectionLost, CryptKeyMissing):
            # These are handled by the application-level error handlers.
            raise
        except Exception:
            return precondition_required(
                gettext(
                    "Connection to the server has been lost."
                )
            )

        # Return sorted nodes based on label
        return make_json_response(
            data=sorted(
                self.get_children_nodes(manager, **kwargs),
                key=lambda c: c['label']
            )
        )

    def get_dependencies(self, conn, object_id, where=None,
                         show_system_objects=None, is_schema_diff=False):
        """
        This function is used to fetch the dependencies for the selected node.

        Args:
            conn: Connection object
            object_id: Object Id of the selected node.
            where: where clause for the sql query (optional)
            show_system_objects: System object status
            is_schema_diff: True when function gets called from schema diff.

        Returns: Dictionary of dependencies for the selected node.
        """

        # Set the sql_path
        sql_path = 'depends/{0}/#{1}#'.format(
            conn.manager.server_type, conn.manager.version)

        if where is None:
            where_clause = "WHERE dep.objid={0}::oid".format(object_id)
        else:
            where_clause = where

        query = render_template("/".join([sql_path, 'dependencies.sql']),
                                where_clause=where_clause,
                                object_id=object_id)
        # fetch the dependency for the selected object
        dependencies = self.__fetch_dependency(
            conn, query, show_system_objects, is_schema_diff)

        # fetch role dependencies
        if where_clause.find('subid') < 0:
            sql = render_template(
                "/".join([sql_path, 'role_dependencies.sql']),
                where_clause=where_clause, db_name=conn.db)

            status, result = conn.execute_dict(sql)
            if not status:
                # NOTE(review): execution continues and `result` is still
                # iterated below even on failure — confirm execute_dict
                # returns a row-shaped payload in the error case.
                current_app.logger.error(result)

            for row in result['rows']:
                ref_name = row['refname']
                dep_str = row['deptype']
                dep_type = ''

                if dep_str == 'a':
                    dep_type = 'ACL'
                elif dep_str == 'o':
                    dep_type = 'Owner'

                # 1260 is the reserved OID of the pg_authid catalog, i.e.
                # the referenced object is a role.
                if row['refclassid'] == 1260:
                    dependencies.append(
                        {'type': 'role',
                         'name': ref_name,
                         'field': dep_type}
                    )

        return dependencies

    def get_dependents(self, conn, object_id, where=None):
        """
        This function is used to fetch the dependents for the selected node.

        Args:
            conn: Connection object
            object_id: Object Id of the selected node.
            where: where clause for the sql query (optional)

        Returns: Dictionary of dependents for the selected node.
        """
        # Set the sql_path
        sql_path = 'depends/{0}/#{1}#'.format(
            conn.manager.server_type, conn.manager.version)

        if where is None:
            where_clause = "WHERE dep.refobjid={0}::oid".format(object_id)
        else:
            where_clause = where

        query = render_template("/".join([sql_path, 'dependents.sql']),
                                where_clause=where_clause)
        # fetch the dependency for the selected object
        dependents = self.__fetch_dependency(conn, query)

        return dependents

    def __fetch_dependency(self, conn, query, show_system_objects=None,
                           is_schema_diff=False):
        """
        This function is used to fetch the dependency for the selected node.

        Args:
            conn: Connection object
            query: sql query to fetch dependencies/dependents
            show_system_objects: System object status
            is_schema_diff: True when function gets called from schema diff.

        Returns: Dictionary of dependency for the selected node.
        """

        # Type codes coming back in the query's `type` column for relations;
        # a value of None marks a code that needs special handling below.
        standard_types = {
            'r': None,
            'i': 'index',
            'S': 'sequence',
            'v': 'view',
            'p': 'partition_table',
            'f': 'foreign_table',
            'm': 'materialized_view',
            't': 'toast_table',
            'I': 'partition_index'
        }

        # Dictionary for the object types
        custom_types = {
            'x': 'external_table', 'n': 'schema', 'd': 'domain',
            'l': 'language', 'Cc': 'check', 'Cd': 'domain_constraints',
            'Cf': 'foreign_key', 'Cp': 'primary_key', 'Co': 'collation',
            'Cu': 'unique_constraint', 'Cx': 'exclusion_constraint',
            'Fw': 'foreign_data_wrapper', 'Fs': 'foreign_server',
            'Fc': 'fts_configuration', 'Fp': 'fts_parser',
            'Fd': 'fts_dictionary', 'Ft': 'fts_template',
            'Ex': 'extension', 'Et': 'event_trigger', 'Pa': 'package',
            'Pf': 'function', 'Pt': 'trigger_function', 'Pp': 'procedure',
            'Rl': 'rule', 'Rs': 'row_security_policy', 'Sy': 'synonym',
            'Ty': 'type', 'Tr': 'trigger', 'Tc': 'compound_trigger',
            # None specified special handling for this type
            'A': None
        }

        # Merging above two dictionaries
        types = {**standard_types, **custom_types}

        # Dictionary for the restrictions
        dep_types = {
            # None specified special handling for this type
            'n': 'normal',
            'a': 'auto',
            'i': None,
            'p': None
        }

        status, result = conn.execute_dict(query)
        if not status:
            # NOTE(review): as in get_dependencies, `result` is consumed
            # below even when the query failed — verify the error payload.
            current_app.logger.error(result)

        dependency = list()

        for row in result['rows']:
            _ref_name = row['refname']
            type_str = row['type']
            dep_str = row['deptype']
            nsp_name = row['nspname']
            object_id = None
            if 'refobjid' in row:
                object_id = row['refobjid']

            # Qualify the referenced name with its schema when known.
            ref_name = ''
            if nsp_name is not None:
                ref_name = nsp_name + '.'

            type_name = ''
            icon = None

            # Fetch the type name from the dictionary
            # if type is not present in the types dictionary then
            # we will continue and not going to add it.
            if len(type_str) and type_str in types and \
                    types[type_str] is not None:
                type_name = types[type_str]
                if type_str == 'Rl':
                    # A rule is displayed as "<rule> ON <table>".
                    ref_name = \
                        _ref_name + ' ON ' + ref_name + row['ownertable']
                    _ref_name = None
                elif type_str == 'Cf':
                    ref_name += row['ownertable'] + '.'
                elif type_str == 'm':
                    icon = 'icon-mview'
            elif len(type_str) and type_str[0] in types and \
                    types[type_str[0]] is None:
                # if type is present in the types dictionary, but it's
                # value is None then it requires special handling.
                if type_str[0] == 'r':
                    # A digit after 'r' encodes a column number, so the
                    # dependency is on a column rather than the table.
                    if (len(type_str) > 1 and type_str[1].isdigit() and
                        int(type_str[1]) > 0) or \
                            (len(type_str) > 2 and type_str[2].isdigit() and
                             int(type_str[2]) > 0):
                        type_name = 'column'
                    else:
                        type_name = 'table'
                        if 'is_inherits' in row and row['is_inherits'] == '1':
                            if 'is_inherited' in row and \
                                    row['is_inherited'] == '1':
                                icon = 'icon-table-multi-inherit'
                            # For tables under partitioned tables,
                            # is_inherits will be true and dependency
                            # will be auto as it inherits from parent
                            # partitioned table
                            elif ('is_inherited' in row and
                                  row['is_inherited'] == '0') and \
                                    dep_str == 'a':
                                type_name = 'partition'
                            else:
                                icon = 'icon-table-inherits'
                        elif 'is_inherited' in row and \
                                row['is_inherited'] == '1':
                            icon = 'icon-table-inherited'
                elif type_str[0] == 'A':
                    # Include only functions
                    if row['adbin'].startswith('{FUNCEXPR'):
                        type_name = 'function'
                        ref_name = row['adsrc']
                    else:
                        continue
            else:
                continue

            if _ref_name is not None:
                ref_name += _ref_name

            # If schema diff is set to True then we don't need to calculate
            # field and also no need to add icon and field in the list.
            if is_schema_diff and type_name != 'schema':
                dependency.append(
                    {
                        'type': type_name,
                        'name': ref_name,
                        'oid': object_id
                    }
                )
            elif not is_schema_diff:
                dep_type = ''
                if show_system_objects is None:
                    show_system_objects = self.blueprint.show_system_objects
                if dep_str[0] in dep_types:
                    # if dep_type is present in the dep_types dictionary,
                    # but it's value is None then it requires special
                    # handling.
                    if dep_types[dep_str[0]] is None:
                        if dep_str[0] == 'i':
                            # Internal dependencies are only listed when
                            # system objects are shown.
                            if show_system_objects:
                                dep_type = 'internal'
                            else:
                                continue
                        elif dep_str[0] == 'p':
                            dep_type = 'pin'
                            type_name = ''
                    else:
                        dep_type = dep_types[dep_str[0]]

                dependency.append(
                    {
                        'type': type_name,
                        'name': ref_name,
                        'field': dep_type,
                        'icon': icon,
                    }
                )

        return dependency

    def _check_cascade_operation(self, only_sql=None):
        """
        Check cascade operation.
        :param only_sql:
        :return:
        """
        if self.cmd == 'delete' or only_sql:
            # This is a cascade operation
            cascade = True
        else:
            cascade = False
        return cascade

    def not_found_error_msg(self, custom_label=None):
        # NOTE(review): `.lower()` applies to the whole formatted message
        # (including "Could"), not just the label — confirm this is the
        # intended capitalization and translation-lookup key.
        return gettext("Could not find the specified {}.".format(
            custom_label if custom_label else self.node_label).lower())
|