Added support for Logical Replication. Fixes #5912
@@ -9,6 +9,7 @@ This release contains a number of bug fixes and new features since the release o

New features
************

| `Issue #5912 <https://redmine.postgresql.org/issues/5912>`_ - Added support for Logical Replication.
| `Issue #5967 <https://redmine.postgresql.org/issues/5967>`_ - Implemented runtime using NWjs to open pgAdmin4 in a standalone window instead of the system tray and web browser.

Housekeeping
@@ -27,4 +28,4 @@ Bug fixes

| `Issue #6177 <https://redmine.postgresql.org/issues/6177>`_ - Fixed an issue while downloading ERD images in Safari and Firefox.
| `Issue #6179 <https://redmine.postgresql.org/issues/6179>`_ - Fixed an issue where Generate SQL was displayed twice in the ERD tool.
| `Issue #6180 <https://redmine.postgresql.org/issues/6180>`_ - Added missing documentation for the 'Download Image' option in ERD.
| `Issue #6187 <https://redmine.postgresql.org/issues/6187>`_ - Limit the upgrade check to run once per day.
@@ -0,0 +1,858 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
"""Implements Publication Node"""
|
||||
import simplejson as json
|
||||
from functools import wraps
|
||||
|
||||
import pgadmin.browser.server_groups.servers.databases as databases
|
||||
from flask import render_template, request, jsonify
|
||||
from flask_babelex import gettext
|
||||
from pgadmin.browser.collection import CollectionNodeModule
|
||||
from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
|
||||
|
||||
class PublicationModule(CollectionNodeModule):
|
||||
"""
|
||||
class PublicationModule(CollectionNodeModule)
|
||||
|
||||
A module class for Publication node derived from CollectionNodeModule.
|
||||
|
||||
Methods:
|
||||
-------
|
||||
* __init__(*args, **kwargs)
|
||||
- Method is used to initialize the PublicationModule and its
|
||||
base module.
|
||||
|
||||
* get_nodes(gid, sid, did)
|
||||
- Method is used to generate the browser collection node.
|
||||
|
||||
* node_inode()
|
||||
- Method is overridden from its base class to make the node a leaf node.
|
||||
|
||||
* script_load()
|
||||
- Load the module script for publication when any of the database nodes
are initialized.
|
||||
"""
|
||||
|
||||
_NODE_TYPE = 'publication'
|
||||
_COLLECTION_LABEL = gettext("Publications")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""
|
||||
Method is used to initialize the PublicationModule and its
|
||||
base module.
|
||||
|
||||
Args:
|
||||
*args:
|
||||
**kwargs:
|
||||
"""
|
||||
super(PublicationModule, self).__init__(*args, **kwargs)
|
||||
self.min_ver = 100000
|
||||
self.max_ver = None
|
||||
|
||||
def get_nodes(self, gid, sid, did):
|
||||
"""
|
||||
Method is used to generate the browser collection node
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database Id
|
||||
"""
|
||||
yield self.generate_browser_collection_node(did)
|
||||
|
||||
@property
|
||||
def node_inode(self):
|
||||
"""
|
||||
Override this property to make the node a leaf node.
|
||||
|
||||
Returns: False as this is the leaf node
|
||||
"""
|
||||
return False
|
||||
|
||||
@property
|
||||
def script_load(self):
|
||||
"""
|
||||
Load the module script for publication when any of the database nodes
|
||||
are initialized.
|
||||
|
||||
Returns: node type of the database module.
|
||||
"""
|
||||
return databases.DatabaseModule.node_type
|
||||
|
||||
@property
|
||||
def module_use_template_javascript(self):
|
||||
"""
|
||||
Returns whether Jinja2 template is used for generating the javascript
|
||||
module.
|
||||
"""
|
||||
return False
|
||||
|
||||
|
||||
blueprint = PublicationModule(__name__)
|
||||
|
||||
|
||||
class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
class PublicationView(PGChildNodeView)
|
||||
|
||||
A view class for Publication node derived from PGChildNodeView.
|
||||
This class is responsible for all the stuff related to view like
|
||||
updating publication node, showing properties, showing sql in sql pane.
|
||||
|
||||
Methods:
|
||||
-------
|
||||
* __init__(**kwargs)
|
||||
- Method is used to initialize the PublicationView and it's base view.
|
||||
|
||||
* check_precondition()
|
||||
- This function will behave as a decorator which checks the
database connection before running the view. It also attaches the
manager, conn & template_path properties to self.
|
||||
|
||||
* list()
|
||||
- This function is used to list all the publication nodes within that
|
||||
collection.
|
||||
|
||||
* nodes()
|
||||
- This function is used to create all the child nodes within that
collection. Here it will create all the publication nodes.
|
||||
|
||||
* properties(gid, sid, did, pbid)
|
||||
- This function will show the properties of the selected publication node
|
||||
|
||||
* update(gid, sid, did, pbid)
|
||||
- This function will update the data for the selected publication node
|
||||
|
||||
* create(gid, sid, did)
|
||||
- This function will create the new publication node
|
||||
|
||||
* delete(gid, sid, did, pbid)
|
||||
- This function will delete the selected publication node
|
||||
|
||||
* msql(gid, sid, did, pbid)
|
||||
- This function is used to return modified SQL for the selected
|
||||
publication node
|
||||
|
||||
* get_sql(data, pbid)
|
||||
- This function will generate sql from model data
|
||||
|
||||
* get_tables(gid, sid, did)
|
||||
- This function returns the list of tables that can be added to the
selected publication node.
|
||||
|
||||
* get_templates(gid, sid, did)
|
||||
- This function returns publication templates.
|
||||
|
||||
* sql(gid, sid, did, pbid):
|
||||
- This function will generate sql to show it in sql pane for the
|
||||
selected publication node.
|
||||
|
||||
* dependents(gid, sid, did, pbid):
|
||||
- This function gets the dependents and returns an ajax response for the
|
||||
publication node.
|
||||
|
||||
* dependencies(self, gid, sid, did, pbid):
|
||||
- This function gets the dependencies and returns an ajax response for the
|
||||
publication node.
|
||||
"""
|
||||
|
||||
_NOT_FOUND_PUB_INFORMATION = \
|
||||
gettext("Could not find the publication information.")
|
||||
node_type = blueprint.node_type
|
||||
|
||||
parent_ids = [
|
||||
{'type': 'int', 'id': 'gid'},
|
||||
{'type': 'int', 'id': 'sid'},
|
||||
{'type': 'int', 'id': 'did'}
|
||||
]
|
||||
ids = [
|
||||
{'type': 'int', 'id': 'pbid'}
|
||||
]
|
||||
|
||||
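# Maps each endpoint to its handler methods; for every endpoint the first
# dict serves requests that include a publication id (pbid) and the second
# serves collection-level requests.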
operations = dict({
|
||||
'obj': [
|
||||
{'get': 'properties', 'delete': 'delete', 'put': 'update'},
|
||||
{'get': 'list', 'post': 'create', 'delete': 'delete'}
|
||||
],
|
||||
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
|
||||
'sql': [{'get': 'sql'}],
|
||||
'msql': [{'get': 'msql'}, {'get': 'msql'}],
|
||||
'stats': [{'get': 'statistics'}],
|
||||
'dependency': [{'get': 'dependencies'}],
|
||||
'dependent': [{'get': 'dependents'}],
|
||||
'get_tables': [{}, {'get': 'get_tables'}],
|
||||
'get_templates': [{}, {'get': 'get_templates'}],
|
||||
'delete': [{'delete': 'delete'}, {'delete': 'delete'}]
|
||||
})
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Method is used to initialize the PublicationView and its base view.
|
||||
Initialize all the variables created/used dynamically like conn,
|
||||
template_path.
|
||||
|
||||
Args:
|
||||
**kwargs:
|
||||
"""
|
||||
self.conn = None
|
||||
self.template_path = None
|
||||
self.manager = None
|
||||
|
||||
super(PublicationView, self).__init__(**kwargs)
|
||||
|
||||
def check_precondition(f):
|
||||
"""
|
||||
This function will behave as a decorator which will check the
|
||||
database connection before running the view. It also attaches
|
||||
manager, conn & template_path properties to self
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold self & kwargs will hold gid,sid,did
|
||||
self = args[0]
|
||||
self.driver = get_driver(PG_DEFAULT_DRIVER)
|
||||
self.manager = self.driver.connection_manager(kwargs['sid'])
|
||||
self.conn = self.manager.connection(did=kwargs['did'])
|
||||
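# Record the database's last system OID, defaulting to 0 when the
# database info is not available.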
self.datlastsysoid = self.manager.db_info[kwargs['did']][
|
||||
'datlastsysoid'] if self.manager.db_info is not None \
|
||||
and kwargs['did'] in self.manager.db_info else 0
|
||||
|
||||
# Set the template path for the SQL scripts
|
||||
self.template_path = (
|
||||
"publications/sql/#gpdb#{0}#".format(self.manager.version) if
|
||||
self.manager.server_type == 'gpdb' else
|
||||
"publications/sql/#{0}#".format(self.manager.version)
|
||||
)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
return wrap
|
||||
|
||||
@staticmethod
|
||||
def _parser_data_input_from_client(data):
|
||||
"""
|
||||
|
||||
:param data:
|
||||
:return: data
|
||||
"""
|
||||
|
||||
if 'pubtable' in data and data['pubtable'] != '':
|
||||
data['pubtable'] = json.loads(
|
||||
data['pubtable'], encoding='utf-8'
|
||||
)
|
||||
return data
|
||||
|
||||
@check_precondition
|
||||
def list(self, gid, sid, did):
|
||||
"""
|
||||
This function is used to list all the publication nodes within that
|
||||
collection.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._PROPERTIES_SQL]))
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
for rows in res['rows']:
|
||||
if not rows['all_table']:
|
||||
get_name_sql = render_template(
|
||||
"/".join([self.template_path, self._DELETE_SQL]),
|
||||
pbid=rows['oid'], conn=self.conn
|
||||
)
|
||||
status, pname = self.conn.execute_scalar(get_name_sql)
|
||||
table_sql = render_template(
|
||||
"/".join([self.template_path, 'get_tables.sql']),
|
||||
pname=pname
|
||||
)
|
||||
|
||||
pub_table = []
|
||||
status, table_res = self.conn.execute_dict(table_sql)
|
||||
|
||||
for table in table_res['rows']:
|
||||
pub_table.append(table['pubtable'])
|
||||
|
||||
pub_table = ", ".join(str(elem) for elem in pub_table)
|
||||
|
||||
rows['pubtable'] = pub_table
|
||||
|
||||
return ajax_response(
|
||||
response=res['rows'],
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def nodes(self, gid, sid, did):
|
||||
"""
|
||||
This function is used to create all the child nodes within the
|
||||
collection. Here it will create all the publication nodes.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
"""
|
||||
res = []
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'nodes.sql']))
|
||||
status, result = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=result)
|
||||
|
||||
for row in result['rows']:
|
||||
res.append(
|
||||
self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
row['name'],
|
||||
icon="icon-publication"
|
||||
))
|
||||
|
||||
return make_json_response(
|
||||
data=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, pbid):
|
||||
"""
|
||||
This function will fetch properties of the publication nodes.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
pbid: Publication ID
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._PROPERTIES_SQL]),
|
||||
pbid=pbid)
|
||||
status, result = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=result)
|
||||
|
||||
for row in result['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
row['name'],
|
||||
icon="icon-publication"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified publication."))
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, pbid):
|
||||
"""
|
||||
This function will show the properties of the
|
||||
selected publication node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
pbid: Publication ID
|
||||
"""
|
||||
status, res = self._fetch_properties(did, pbid)
|
||||
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, did, pbid):
|
||||
"""
|
||||
This function fetches the properties of the publication.
|
||||
:param did:
|
||||
:param pbid:
|
||||
:return:
|
||||
"""
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._PROPERTIES_SQL]),
|
||||
pbid=pbid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return False, gone(self._NOT_FOUND_PUB_INFORMATION)
|
||||
|
||||
if not res['rows'][0]['all_table']:
|
||||
get_name_sql = render_template(
|
||||
"/".join([self.template_path, self._DELETE_SQL]),
|
||||
pbid=pbid, conn=self.conn
|
||||
)
|
||||
status, pname = self.conn.execute_scalar(get_name_sql)
|
||||
table_sql = render_template(
|
||||
"/".join([self.template_path, 'get_tables.sql']),
|
||||
pname=pname
|
||||
)
|
||||
|
||||
pub_table = []
|
||||
status, table_res = self.conn.execute_dict(table_sql)
|
||||
|
||||
for table in table_res['rows']:
|
||||
pub_table.append(table['pubtable'])
|
||||
|
||||
res['rows'][0]['pubtable'] = pub_table
|
||||
|
||||
return True, res['rows'][0]
|
||||
|
||||
@check_precondition
|
||||
def update(self, gid, sid, did, pbid):
|
||||
"""
|
||||
This function will update the data for the selected publication node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
pbid: Publication ID
|
||||
"""
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
|
||||
try:
|
||||
data = self._parser_data_input_from_client(data)
|
||||
|
||||
sql, name = self.get_sql(data, pbid)
|
||||
|
||||
# Most probably this is due to an error
|
||||
if not isinstance(sql, str):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
pbid,
|
||||
did,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did):
|
||||
"""
|
||||
This function will create the publication object
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
"""
|
||||
required_args = [
|
||||
'name'
|
||||
]
|
||||
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
"Could not find the required parameter ({})."
|
||||
).format(arg)
|
||||
)
|
||||
|
||||
try:
|
||||
data = self._parser_data_input_from_client(data)
|
||||
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._CREATE_SQL]),
|
||||
data=data, conn=self.conn)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'get_position.sql']),
|
||||
conn=self.conn, pubname=data['name']
|
||||
)
|
||||
|
||||
status, r_set = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=r_set)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
r_set['rows'][0]['oid'],
|
||||
did,
|
||||
r_set['rows'][0]['name'],
|
||||
icon='icon-publication'
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, pbid=None):
|
||||
"""
|
||||
This function will drop the publication object
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
pbid: Publication ID
|
||||
"""
|
||||
if pbid is None:
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
else:
|
||||
data = {'ids': [pbid]}
|
||||
|
||||
cascade = self._check_cascade_operation()
|
||||
|
||||
try:
|
||||
for pbid in data['ids']:
|
||||
# Get name for publication from pbid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._DELETE_SQL]),
|
||||
pbid=pbid, conn=self.conn
|
||||
)
|
||||
status, pname = self.conn.execute_scalar(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=pname)
|
||||
|
||||
# drop publication
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._DELETE_SQL]),
|
||||
pname=pname, cascade=cascade, conn=self.conn
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info=gettext("Publication dropped")
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def msql(self, gid, sid, did, pbid=None):
|
||||
"""
|
||||
This function is used to return modified SQL for the selected
|
||||
publication node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
pbid: Publication ID
|
||||
"""
|
||||
data = {}
|
||||
for k, v in request.args.items():
|
||||
try:
|
||||
# comments should be taken as is because if a user enters a
# JSON-like comment it would be parsed by loads, which should not happen
|
||||
if k in ('description',):
|
||||
data[k] = v
|
||||
else:
|
||||
data[k] = json.loads(v, encoding='utf-8')
|
||||
except ValueError:
|
||||
data[k] = v
|
||||
try:
|
||||
sql, name = self.get_sql(data, pbid)
|
||||
# Most probably this is due to an error
|
||||
if not isinstance(sql, str):
|
||||
return sql
|
||||
if sql == '':
|
||||
sql = "--modified SQL"
|
||||
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
status=200
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def _get_option_details(self, old_data, data):
|
||||
"""
|
||||
Return the option details
|
||||
:param old_data:
|
||||
:param data:
|
||||
:return: data
|
||||
"""
|
||||
|
||||
if 'evnt_insert' in data or 'evnt_delete' in data or \
|
||||
'evnt_update' in data or 'evnt_truncate' in data:
|
||||
|
||||
if 'evnt_insert' not in data:
|
||||
data['evnt_insert'] = old_data['evnt_insert']
|
||||
|
||||
if 'evnt_delete' not in data:
|
||||
data['evnt_delete'] = old_data['evnt_delete']
|
||||
|
||||
if 'evnt_update' not in data:
|
||||
data['evnt_update'] = old_data['evnt_update']
|
||||
|
||||
if 'evnt_truncate' not in data and 'evnt_truncate' in old_data:
|
||||
data['evnt_truncate'] = old_data['evnt_truncate']
|
||||
|
||||
return data
|
||||
|
||||
def get_sql(self, data, pbid=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
|
||||
Args:
|
||||
data: Contains the data of the selected publication node.
|
||||
pbid: Publication ID
|
||||
"""
|
||||
required_args = [
|
||||
'name'
|
||||
]
|
||||
drop_table = False
|
||||
add_table = False
|
||||
|
||||
if pbid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._PROPERTIES_SQL]), pbid=pbid
|
||||
)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(self._NOT_FOUND_PUB_INFORMATION)
|
||||
|
||||
old_data = self._get_old_table_data(res['rows'][0]['name'], res)
|
||||
|
||||
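# Work out which tables have to be dropped from or added to the
# publication compared with its current definition.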
drop_table_data = []
|
||||
|
||||
add_table_data = []
|
||||
|
||||
for table in old_data['pubtable']:
|
||||
if 'pubtable' in data and table not in data['pubtable']:
|
||||
drop_table_data.append(table)
|
||||
drop_table = True
|
||||
|
||||
if 'pubtable' in data:
|
||||
for table in data['pubtable']:
|
||||
if table not in old_data['pubtable']:
|
||||
add_table_data.append(table)
|
||||
add_table = True
|
||||
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
# Add old event setting for future reference
|
||||
data = self._get_option_details(old_data, data)
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._UPDATE_SQL]),
|
||||
data=data, o_data=old_data, conn=self.conn,
|
||||
drop_table=drop_table, drop_table_data=drop_table_data,
|
||||
add_table=add_table, add_table_data=add_table_data
|
||||
)
|
||||
return sql.strip('\n'), data['name'] if 'name' in data \
|
||||
else old_data['name']
|
||||
else:
|
||||
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._CREATE_SQL]),
|
||||
data=data, conn=self.conn)
|
||||
return sql.strip('\n'), data['name']
|
||||
|
||||
@check_precondition
|
||||
def get_tables(self, gid, sid, did):
|
||||
"""
|
||||
This function returns the tables list.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
"""
|
||||
res = [{'label': '', 'value': ''}]
|
||||
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'get_all_tables.sql']),
|
||||
show_sys_objects=self.blueprint.
|
||||
show_system_objects,
|
||||
server_type=self.manager.server_type
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
for row in rset['rows']:
|
||||
res.append(
|
||||
{
|
||||
'label': row['table'],
|
||||
'value': row['table']
|
||||
}
|
||||
)
|
||||
return make_json_response(
|
||||
data=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _get_old_table_data(self, pname, res):
|
||||
"""
|
||||
This function returns the table details before an update.
|
||||
:param pname:
|
||||
:param res:
|
||||
:return:old_data
|
||||
"""
|
||||
|
||||
table_sql = render_template(
|
||||
"/".join([self.template_path, 'get_tables.sql']),
|
||||
pname=pname
|
||||
)
|
||||
|
||||
pub_table = []
|
||||
status, table_res = self.conn.execute_dict(table_sql)
|
||||
|
||||
for table in table_res['rows']:
|
||||
pub_table.append(table['pubtable'])
|
||||
|
||||
res['rows'][0]['pubtable'] = pub_table
|
||||
|
||||
# Making copy of output for future use
|
||||
old_data = dict(res['rows'][0])
|
||||
|
||||
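# A FOR ALL TABLES publication has no explicit table list.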
if 'all_table' in old_data and old_data['all_table']:
|
||||
old_data['pubtable'] = ''
|
||||
|
||||
return old_data
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, pbid, json_resp=True):
|
||||
"""
|
||||
This function will generate sql to show in the sql panel for the
|
||||
selected publication node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
pbid: Publication ID
|
||||
json_resp:
|
||||
"""
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._PROPERTIES_SQL]),
|
||||
pbid=pbid
|
||||
)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(self._NOT_FOUND_PUB_INFORMATION)
|
||||
|
||||
get_name_sql = render_template(
|
||||
"/".join([self.template_path, self._DELETE_SQL]),
|
||||
pbid=pbid, conn=self.conn
|
||||
)
|
||||
status, pname = self.conn.execute_scalar(get_name_sql)
|
||||
|
||||
# Get old table details
|
||||
old_data = self._get_old_table_data(pname, res)
|
||||
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._CREATE_SQL]),
|
||||
data=old_data, conn=self.conn)
|
||||
|
||||
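# Prefix the reverse-engineered SQL with a header naming the publication
# and a commented-out DROP statement.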
sql_header = "-- Publication: {}".format(old_data['name'])
|
||||
sql_header += "\n\n"
|
||||
|
||||
sql_header += "-- "
|
||||
|
||||
sql_header += render_template(
|
||||
"/".join([self.template_path, self._DELETE_SQL]),
|
||||
pname=old_data['name'], )
|
||||
|
||||
sql_header += "\n"
|
||||
|
||||
sql = sql_header + sql
|
||||
|
||||
if not json_resp:
|
||||
return sql
|
||||
|
||||
return ajax_response(response=sql)
|
||||
|
||||
@check_precondition
|
||||
def dependents(self, gid, sid, did, pbid):
|
||||
"""
|
||||
This function gets the dependents and returns an ajax response
|
||||
for the publication node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
pbid: Publication ID
|
||||
"""
|
||||
dependents_result = self.get_dependents(self.conn, pbid)
|
||||
return ajax_response(
|
||||
response=dependents_result,
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def dependencies(self, gid, sid, did, pbid):
|
||||
"""
|
||||
This function gets the dependencies and returns an ajax response
|
||||
for the publication node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
pbid: Publication ID
|
||||
"""
|
||||
dependencies_result = self.get_dependencies(self.conn, pbid)
|
||||
return ajax_response(
|
||||
response=dependencies_result,
|
||||
status=200
|
||||
)
|
||||
|
||||
|
||||
PublicationView.register_node_view(blueprint)
|
@@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 24.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#DDE1F0;stroke:#4B5FAD;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
.st1{fill:#4B5FAD;}
|
||||
.st2{fill:none;stroke:#4B5FAD;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
</style>
|
||||
<g>
|
||||
<path class="st0" d="M10,13c-2,2-5.1,2-7.1,0s-2-5.1,0-7.1C4.5,7.5,8.5,11.5,10,13z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path class="st0" d="M11.8,11.3c-2,2-5.1,2-7.1,0s-2-5.1,0-7.1C6.2,5.7,10.2,9.8,11.8,11.3z"/>
|
||||
</g>
|
||||
<path class="st1" d="M8.7,7.8l0.8-0.7c0.1,0.1,0.3,0.1,0.4,0.1c0.6,0,1-0.4,1-1s-0.4-1-1-1s-1,0.4-1,1c0,0.1,0,0.3,0.1,0.4L8.2,7.3
|
||||
L8.7,7.8z"/>
|
||||
<g>
|
||||
<path class="st2" d="M10.5,1.6c2.2,0,4,1.8,4,4"/>
|
||||
<path class="st2" d="M10.6,3.6c1.1,0,2,0.9,2,2"/>
|
||||
</g>
|
||||
</svg>
|
@@ -0,0 +1,30 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 24.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FBEEDF;stroke:#E58E26;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
.st1{fill:#E58E26;}
|
||||
.st2{fill:none;stroke:#E58E26;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
</style>
|
||||
<g>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st0" d="M12.9,5.7c2,2,2,5.1,0,7.1s-5.1,2-7.1,0C7.3,11.3,11.4,7.2,12.9,5.7z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st0" d="M11.4,4.2c2,2,2,5.1,0,7.1s-5.1,2-7.1,0C5.8,9.8,9.9,5.7,11.4,4.2z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path class="st1" d="M7.8,7.3L7.1,6.6c0.1-0.1,0.1-0.3,0.1-0.4c0-0.6-0.4-1-1-1s-1,0.4-1,1s0.4,1,1,1c0.1,0,0.3,0,0.4-0.1
|
||||
l0.7,0.7"/>
|
||||
<g>
|
||||
<path class="st2" d="M5.6,1.7c0,2.2-1.8,4-4,4"/>
|
||||
<path class="st2" d="M3.7,1.8c0,1.1-0.9,2-2,2"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
@@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 23.0.6, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#DDE1F0;stroke:#4B5FAD;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
.st1{fill:#4B5FAD;}
|
||||
.st2{fill:none;stroke:#4B5FAD;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
</style>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st0" d="M11,12c-2,2-5.1,2-7.1,0s-2-5.1,0-7.1C5.4,6.4,9.5,10.5,11,12z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path class="st1" d="M7.9,8.6l0.7-0.7C8.7,8,8.9,8,9,8c0.6,0,1-0.4,1-1S9.6,6,9,6S8,6.4,8,7c0,0.1,0,0.3,0.1,0.4L7.4,8.1"/>
|
||||
<g>
|
||||
<path class="st2" d="M9.6,2.5c2.2,0,4,1.8,4,4"/>
|
||||
<path class="st2" d="M9.7,4.4c1.1,0,2,0.9,2,2"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
@@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 23.0.6, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FBEEDF;stroke:#E58E26;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
.st1{fill:#E58E26;}
|
||||
.st2{fill:none;stroke:#E58E26;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
</style>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st0" d="M12.2,5c2,2,2,5.1,0,7.1s-5.1,2-7.1,0C6.6,10.6,10.7,6.5,12.2,5z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path class="st1" d="M8.6,8.1L7.9,7.4C8,7.3,8,7.1,8,7c0-0.6-0.4-1-1-1S6,6.4,6,7s0.4,1,1,1c0.1,0,0.3,0,0.4-0.1l0.7,0.7"/>
|
||||
<g>
|
||||
<path class="st2" d="M6.4,2.5c0,2.2-1.8,4-4,4"/>
|
||||
<path class="st2" d="M4.5,2.6c0,1.1-0.9,2-2,2"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
@@ -0,0 +1,252 @@
|
||||
/////////////////////////////////////////////////////////////
|
||||
//
|
||||
// pgAdmin 4 - PostgreSQL Tools
|
||||
//
|
||||
// Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
// This software is released under the PostgreSQL Licence
|
||||
//
|
||||
//////////////////////////////////////////////////////////////
|
||||
|
||||
define('pgadmin.node.publication', [
|
||||
'sources/gettext', 'sources/url_for', 'jquery', 'underscore',
|
||||
'sources/pgadmin', 'pgadmin.browser', 'pgadmin.backform',
|
||||
'pgadmin.browser.collection', 'pgadmin.browser.server.privilege',
|
||||
], function(gettext, url_for, $, _, pgAdmin, pgBrowser, Backform) {
|
||||
|
||||
// Extend the browser's collection class for publications collection
|
||||
if (!pgBrowser.Nodes['coll-publication']) {
|
||||
pgBrowser.Nodes['coll-publication'] =
|
||||
pgBrowser.Collection.extend({
|
||||
node: 'publication',
|
||||
label: gettext('Publications'),
|
||||
type: 'coll-publication',
|
||||
columns: ['name', 'pubowner', 'pubtable', 'all_table'],
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
// Extend the browser's node class for publication node
|
||||
if (!pgBrowser.Nodes['publication']) {
|
||||
pgBrowser.Nodes['publication'] = pgBrowser.Node.extend({
|
||||
parent_type: 'database',
|
||||
type: 'publication',
|
||||
sqlAlterHelp: 'sql-alterpublication.html',
|
||||
sqlCreateHelp: 'sql-createpublication.html',
|
||||
dialogHelp: url_for('help.static', {'filename': 'publication_dialog.html'}),
|
||||
label: gettext('Publication'),
|
||||
hasSQL: true,
|
||||
canDrop: true,
|
||||
canDropCascade: true,
|
||||
hasDepends: true,
|
||||
|
||||
Init: function() {
|
||||
|
||||
// Avoid multiple registration of menus
|
||||
if (this.initialized)
|
||||
return;
|
||||
|
||||
this.initialized = true;
|
||||
|
||||
|
||||
// Add context menus for publication
|
||||
pgBrowser.add_menus([{
|
||||
name: 'create_publication_on_database', node: 'database', module: this,
|
||||
applies: ['object', 'context'], callback: 'show_obj_properties',
|
||||
category: 'create', priority: 4, label: gettext('Publication...'),
|
||||
icon: 'wcTabIcon icon-publication', data: {action: 'create'},
|
||||
enable: pgBrowser.Nodes['database'].canCreate,
|
||||
},{
|
||||
name: 'create_publication_on_coll', node: 'coll-publication', module: this,
|
||||
applies: ['object', 'context'], callback: 'show_obj_properties',
|
||||
category: 'create', priority: 4, label: gettext('Publication...'),
|
||||
icon: 'wcTabIcon icon-publication', data: {action: 'create'},
|
||||
},{
|
||||
name: 'create_publication', node: 'publication', module: this,
|
||||
applies: ['object', 'context'], callback: 'show_obj_properties',
|
||||
category: 'create', priority: 4, label: gettext('Publication...'),
|
||||
icon: 'wcTabIcon icon-publication', data: {action: 'create'},
|
||||
}]);
|
||||
},
|
||||
// Define the model for publication node
|
||||
model: pgBrowser.Node.Model.extend({
|
||||
idAttribute: 'oid',
|
||||
defaults: {
|
||||
name: undefined,
|
||||
pubowner: undefined,
|
||||
pubtable: undefined,
|
||||
all_table: undefined,
|
||||
evnt_insert:true,
|
||||
evnt_delete:true,
|
||||
evnt_update:true,
|
||||
evnt_truncate:true,
|
||||
only_table: undefined,
|
||||
},
|
||||
|
||||
// Default values!
|
||||
initialize: function(attrs, args) {
|
||||
var isNew = (_.size(attrs) === 0);
|
||||
if (isNew) {
|
||||
var userInfo = pgBrowser.serverInfo[args.node_info.server._id].user;
|
||||
|
||||
this.set({'pubowner': userInfo.name}, {silent: true});
|
||||
}
|
||||
pgBrowser.Node.Model.prototype.initialize.apply(this, arguments);
|
||||
},
|
||||
|
||||
// Define the schema for the publication node
|
||||
schema: [{
|
||||
id: 'name', label: gettext('Name'), type: 'text',
|
||||
mode: ['properties', 'create', 'edit'],
|
||||
visible: function() {
|
||||
if(!_.isUndefined(this.node_info) && !_.isUndefined(this.node_info.server)
|
||||
&& !_.isUndefined(this.node_info.server.version) &&
|
||||
this.node_info.server.version >= 100000) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
},{
|
||||
id: 'oid', label: gettext('OID'), cell: 'string', mode: ['properties'],
|
||||
type: 'text',
|
||||
},{
|
||||
id: 'pubowner', label: gettext('Owner'), type: 'text',
|
||||
control: Backform.NodeListByNameControl, node: 'role',
|
||||
disabled: function(m){
|
||||
if(m.isNew())
|
||||
return true;
|
||||
return false;
|
||||
},
|
||||
mode: ['edit', 'properties', 'create'], select2: { allowClear: false},
|
||||
},{
|
||||
id: 'all_table', label: gettext('All tables?'), type: 'switch',
|
||||
group: gettext('Definition'), mode: ['edit', 'properties', 'create'], deps: ['name'],
|
||||
readonly: function(m) {return !m.isNew();},
|
||||
},
|
||||
{
|
||||
id: 'only_table', label: gettext('Only table?'), type: 'switch',
|
||||
group: gettext('Definition'), mode: ['edit', 'create'],
|
||||
deps: ['name', 'pubtable', 'all_table'], readonly: 'isTable',
|
||||
helpMessage: gettext('If ONLY is specified before the table name, only that table is added to the publication. If ONLY is not specified, the table and all its descendant tables (if any) are added.'),
|
||||
},
|
||||
{
|
||||
id: 'pubtable', label: gettext('Tables'), type: 'array',
|
||||
select2: { allowClear: true, multiple: true },
|
||||
control: 'node-ajax-options', url:'get_tables',
|
||||
group: gettext('Definition'), mode: ['edit', 'create'],
|
||||
deps: ['all_table'], disabled: 'isAllTable',
|
||||
},
|
||||
{
|
||||
id: 'pubtable', label: gettext('Tables'), type: 'text', group: gettext('Definition'),
|
||||
mode: ['properties'],
|
||||
},
|
||||
{
|
||||
type: 'nested', control: 'fieldset', mode: ['create','edit', 'properties'],
|
||||
label: gettext('With'), group: gettext('Definition'), contentClass: 'row',
|
||||
schema:[{
|
||||
id: 'evnt_insert', label: gettext('INSERT'),
|
||||
type: 'switch', mode: ['create','edit', 'properties'],
|
||||
group: gettext('With'),
|
||||
extraToggleClasses: 'pg-el-sm-6',
|
||||
controlLabelClassName: 'control-label pg-el-sm-5 pg-el-12',
|
||||
controlsClassName: 'pgadmin-controls pg-el-sm-7 pg-el-12',
|
||||
},{
|
||||
id: 'evnt_update', label: gettext('UPDATE'),
|
||||
type: 'switch', mode: ['create','edit', 'properties'],
|
||||
group: gettext('With'),
|
||||
extraToggleClasses: 'pg-el-sm-6',
|
||||
controlLabelClassName: 'control-label pg-el-sm-5 pg-el-12',
|
||||
controlsClassName: 'pgadmin-controls pg-el-sm-7 pg-el-12',
|
||||
},{
|
||||
id: 'evnt_delete', label: gettext('DELETE'),
|
||||
type: 'switch', mode: ['create','edit', 'properties'],
|
||||
group: gettext('With'),
|
||||
extraToggleClasses: 'pg-el-sm-6',
|
||||
controlLabelClassName: 'control-label pg-el-sm-5 pg-el-12',
|
||||
controlsClassName: 'pgadmin-controls pg-el-sm-7 pg-el-12',
|
||||
},{
|
||||
id: 'evnt_truncate', label: gettext('TRUNCATE'),
|
||||
type: 'switch', group: gettext('With'),
|
||||
extraToggleClasses: 'pg-el-sm-6',
|
||||
controlLabelClassName: 'control-label pg-el-sm-5 pg-el-12',
|
||||
controlsClassName: 'pgadmin-controls pg-el-sm-7 pg-el-12',
|
||||
visible: function(m) {
|
||||
if(!_.isUndefined(m.node_info) && !_.isUndefined(m.node_info.server)
|
||||
&& !_.isUndefined(m.node_info.server.version) &&
|
||||
m.node_info.server.version >= 110000)
|
||||
return true;
|
||||
return false;
|
||||
},
|
||||
|
||||
}],
|
||||
},
|
||||
],
|
||||
|
||||
isAllTable: function(m) {
|
||||
var all_table = m.get('all_table');
|
||||
if(all_table){
|
||||
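// 'All tables' takes precedence, so clear any individually selected tables.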
setTimeout( function() {
|
||||
m.set('pubtable', '');
|
||||
}, 10);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
isTable: function(m) {
|
||||
var all_table = m.get('all_table'),
|
||||
table = m.get('pubtable');
|
||||
if(all_table){
|
||||
setTimeout( function() {
|
||||
m.set('only_table', false);
|
||||
}, 10);
|
||||
return true;
|
||||
}
|
||||
|
||||
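// The ONLY switch becomes editable only when some tables are selected and
// the table list is being changed in the current edit session.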
if (!_.isUndefined(table) && table.length > 0 && m._changing && !_.isEqual(m.origSessAttrs['pubtable'], m.changed['pubtable']) && 'pubtable' in m.changed){
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
},
|
||||
|
||||
/* validate function is used to validate the input given by
|
||||
* the user. In case of error, message will be displayed on
|
||||
* the GUI for the respective control.
|
||||
*/
|
||||
|
||||
sessChanged: function() {
|
||||
if (this.sessAttrs['pubtable'] == '' && this.origSessAttrs['pubtable'] == '')
|
||||
return false;
|
||||
return pgBrowser.DataModel.prototype.sessChanged.apply(this);
|
||||
},
|
||||
|
||||
validate: function() {
|
||||
var name = this.get('name'),
|
||||
msg;
|
||||
|
||||
if (_.isUndefined(name) || _.isNull(name) ||
|
||||
String(name).replace(/^\s+|\s+$/g, '') == '') {
|
||||
msg = gettext('Name cannot be empty.');
|
||||
this.errorModel.set('name', msg);
|
||||
return msg;
|
||||
} else {
|
||||
this.errorModel.unset('name');
|
||||
}
|
||||
return null;
|
||||
},
|
||||
canCreate: function(itemData, item) {
|
||||
|
||||
var treeData = this.getTreeNodeHierarchy(item),
|
||||
server = treeData['server'];
|
||||
|
||||
// If the server version is less than 10 then do not allow the 'create' menu
|
||||
if (server && server.version < 100000)
|
||||
return false;
|
||||
|
||||
// by default we want to allow create menu
|
||||
return true;
|
||||
},
|
||||
|
||||
}),
|
||||
});
|
||||
}
|
||||
return pgBrowser.Nodes['coll-publication'];
|
||||
});
|
@@ -0,0 +1,20 @@
|
||||
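{# The add_comma_after_* flags record that another publish event follows, so a comma separator can be emitted between events. #}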
{% if data.evnt_delete or data.evnt_update or data.evnt_truncate %}
|
||||
{% set add_comma_after_insert = 'insert' %}
|
||||
{% endif %}
|
||||
{% if data.evnt_truncate %}
|
||||
{% set add_comma_after_delete = 'delete' %}
|
||||
{% endif %}
|
||||
{% if data.evnt_delete or data.evnt_truncate%}
|
||||
{% set add_comma_after_update = 'update' %}
|
||||
{% endif %}
|
||||
{### Create PUBLICATION ###}
|
||||
CREATE PUBLICATION {{ conn|qtIdent(data.name) }}
|
||||
{% if data.all_table %}
|
||||
FOR ALL TABLES
|
||||
{% elif data.pubtable %}
|
||||
FOR TABLE {% if data.only_table%}ONLY {% endif %}{% for pub_table in data.pubtable %}{% if loop.index != 1 %}, {% endif %}{{ pub_table }}{% endfor %}
|
||||
|
||||
{% endif %}
|
||||
{% if data.evnt_insert or data.evnt_update or data.evnt_delete or data.evnt_truncate %}
|
||||
WITH (publish = '{% if data.evnt_insert %}insert{% if add_comma_after_insert == 'insert' %}, {% endif %}{% endif %}{% if data.evnt_update %}update{% if add_comma_after_update == 'update' %}, {% endif %}{% endif %}{% if data.evnt_delete %}delete{% if add_comma_after_delete == 'delete' %}, {% endif %}{% endif %}{% if data.evnt_truncate %}truncate{% endif %}');
|
||||
{% endif %}
|
@@ -0,0 +1,8 @@
|
||||
SELECT c.oid AS oid, c.pubname AS name,
|
||||
pubinsert AS evnt_insert, pubupdate AS evnt_update, pubdelete AS evnt_delete, pubtruncate AS evnt_truncate,
|
||||
puballtables AS all_table,
|
||||
pga.rolname AS pubowner FROM pg_publication c
|
||||
JOIN pg_authid pga ON c.pubowner= pga.oid
|
||||
{% if pbid %}
|
||||
WHERE c.oid = {{ pbid }}
|
||||
{% endif %}
|
@@ -0,0 +1,16 @@
|
||||
{% if data.evnt_delete or data.evnt_update or data.evnt_truncate %}
|
||||
{% set add_comma_after_insert = 'insert' %}
|
||||
{% endif %}
|
||||
{% if data.evnt_delete or data.evnt_truncate%}
|
||||
{% set add_comma_after_update = 'update' %}
|
||||
{% endif %}
|
||||
CREATE PUBLICATION {{ conn|qtIdent(data.name) }}
|
||||
{% if data.all_table %}
|
||||
FOR ALL TABLES
|
||||
{% elif data.pubtable %}
|
||||
FOR TABLE {% if data.only_table%}ONLY {% endif %}{% for pub_table in data.pubtable %}{% if loop.index != 1 %}, {% endif %}{{pub_table}}{% endfor %}
|
||||
|
||||
{% endif %}
|
||||
{% if data.evnt_insert or data.evnt_update or data.evnt_delete or data.evnt_truncate %}
|
||||
WITH (publish = '{% if data.evnt_insert %}insert{% if add_comma_after_insert == 'insert' %}, {% endif %}{% endif %}{% if data.evnt_update %}update{% if add_comma_after_update == 'update' %}, {% endif %}{% endif %}{% if data.evnt_delete %}delete{% endif %}');
|
||||
{% endif %}
|
@@ -0,0 +1,8 @@
|
||||
{# ============= Get the publication name using oid ============= #}
|
||||
{% if pbid %}
|
||||
SELECT pubname FROM pg_publication WHERE oid = {{pbid}}::oid;
|
||||
{% endif %}
|
||||
{# ============= Drop the publication ============= #}
|
||||
{% if pname %}
|
||||
DROP PUBLICATION {{ conn|qtIdent(pname) }}{% if cascade %} CASCADE{% endif%};
|
||||
{% endif %}
|
@@ -0,0 +1,6 @@
|
||||
SELECT quote_ident(c.table_schema)||'.'||quote_ident(c.table_name) AS table
|
||||
FROM information_schema.tables c
|
||||
where c.table_type = 'BASE TABLE'
|
||||
AND c.table_schema NOT LIKE 'pg\_%'
|
||||
AND c.table_schema NOT LIKE 'pgagent'
|
||||
AND c.table_schema NOT IN ('information_schema') ORDER BY 1;
|
@@ -0,0 +1 @@
|
||||
SELECT oid, pubname AS name FROM pg_publication where pubname = '{{ pubname }}';
|
@@ -0,0 +1 @@
|
||||
SELECT quote_ident(pgb_table.schemaname)||'.'||quote_ident(pgb_table.tablename) AS pubtable FROM pg_publication_tables pgb_table where pubname = '{{ pname }}' and pgb_table.schemaname NOT LIKE 'pgagent';
|
@@ -0,0 +1 @@
|
||||
SELECT oid , pubname AS name FROM pg_publication;
|
@@ -0,0 +1,8 @@
|
||||
SELECT c.oid AS oid, c.pubname AS name,
|
||||
pubinsert AS evnt_insert, pubupdate AS evnt_update, pubdelete AS evnt_delete,
|
||||
puballtables AS all_table,
|
||||
pga.rolname AS pubowner FROM pg_publication c
|
||||
JOIN pg_authid pga ON c.pubowner= pga.oid
|
||||
{% if pbid %}
|
||||
where c.oid = {{ pbid }}
|
||||
{% endif %}
|
@@ -0,0 +1,42 @@
|
||||
{% if data.evnt_delete or data.evnt_update or data.evnt_truncate %}
|
||||
{% set add_comma_after_insert = 'insert' %}
|
||||
{% endif %}
|
||||
{% if data.evnt_truncate %}
|
||||
{% set add_comma_after_delete = 'delete' %}
|
||||
{% endif %}
|
||||
{% if data.evnt_delete or data.evnt_truncate%}
|
||||
{% set add_comma_after_update = 'update' %}
|
||||
{% endif %}
|
||||
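{# o_data holds the publication's existing definition; data holds only the requested changes. #}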
{### Alter publication owner ###}
|
||||
{% if data.pubowner %}
|
||||
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
|
||||
OWNER TO {{ data.pubowner }};
|
||||
|
||||
{% endif %}
|
||||
{### Alter publication event ###}
|
||||
{% if (data.evnt_insert is defined and data.evnt_insert != o_data.evnt_insert) or (data.evnt_update is defined and data.evnt_update != o_data.evnt_update) or (data.evnt_delete is defined and data.evnt_delete != o_data.evnt_delete) or (data.evnt_truncate is defined and data.evnt_truncate != o_data.evnt_truncate) %}
|
||||
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }} SET
|
||||
(publish = '{% if data.evnt_insert %}insert{% if add_comma_after_insert == 'insert' %}, {% endif %}{% endif %}{% if data.evnt_update %}update{% if add_comma_after_update == 'update' %}, {% endif %}{% endif %}{% if data.evnt_delete %}delete{% if add_comma_after_delete == 'delete' %}, {% endif %}{% endif %}{% if data.evnt_truncate %}truncate{% endif %}');
|
||||
|
||||
{% endif %}
|
||||
{### Alter drop publication table ###}
|
||||
{% if drop_table %}
|
||||
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
|
||||
DROP TABLE {% if data.only_table%}ONLY {% endif %}{% for pub_table in drop_table_data %}{% if loop.index != 1 %}, {% endif %}{{ pub_table }}{% endfor %};
|
||||
|
||||
{% endif %}
|
||||
{### Alter publication table ###}
|
||||
{% if add_table %}
|
||||
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
|
||||
ADD TABLE {% if data.only_table%}ONLY {% endif %}{% for pub_table in add_table_data %}{% if loop.index != 1 %}, {% endif %}{{ pub_table }}{% endfor %};
|
||||
|
||||
{% endif %}
|
||||
{### Alter publication name ###}
|
||||
{% if data.name != o_data.name %}
|
||||
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
|
||||
RENAME TO {{ conn|qtIdent(data.name) }};
|
||||
{% endif %}
|
||||
|
||||
|
||||
|
||||
|
@@ -0,0 +1,7 @@
|
||||
-- Publication: alterd_publication
|
||||
|
||||
-- DROP PUBLICATION alterd_publication;
|
||||
|
||||
CREATE PUBLICATION alterd_publication
|
||||
FOR ALL TABLES
|
||||
WITH (publish = 'insert, update');
|
@@ -0,0 +1,7 @@
|
||||
-- Publication: alterd_publication
|
||||
|
||||
-- DROP PUBLICATION alterd_publication;
|
||||
|
||||
CREATE PUBLICATION alterd_publication
|
||||
FOR ALL TABLES
|
||||
WITH (publish = 'insert, update, delete');
|
@@ -0,0 +1,2 @@
|
||||
ALTER PUBLICATION alterd_publication_event SET
|
||||
(publish = 'insert, update');
|
@@ -0,0 +1 @@
|
||||
ALTER PUBLICATION test_publication_to_alter RENAME TO alterd_publication;
|
@@ -0,0 +1,7 @@
|
||||
-- Publication: test_publication_create
|
||||
|
||||
-- DROP PUBLICATION test_publication_create;
|
||||
|
||||
CREATE PUBLICATION test_publication_create
|
||||
FOR ALL TABLES
|
||||
WITH (publish = 'insert, update');
|
@@ -0,0 +1,3 @@
|
||||
CREATE PUBLICATION test_publication_create
|
||||
FOR ALL TABLES
|
||||
WITH (publish = 'insert, update');
|
@@ -0,0 +1,7 @@
|
||||
-- Publication: test_publication_with_update
|
||||
|
||||
-- DROP PUBLICATION test_publication_with_update;
|
||||
|
||||
CREATE PUBLICATION test_publication_with_update
|
||||
FOR ALL TABLES
|
||||
WITH (publish = 'update');
|
@@ -0,0 +1,3 @@
|
||||
CREATE PUBLICATION test_publication_with_update
|
||||
FOR ALL TABLES
|
||||
WITH (publish = 'update');
|
@@ -0,0 +1,79 @@
|
||||
{
|
||||
"scenarios": [
|
||||
{
|
||||
"type": "create",
|
||||
"name": "Create Table For publication",
|
||||
"endpoint": "NODE-table.obj",
|
||||
"sql_endpoint": "NODE-table.sql_id",
|
||||
"data": {
|
||||
"name": "test_publication",
|
||||
"columns": [
|
||||
{
|
||||
"name": "emp_id",
|
||||
"cltype": "integer",
|
||||
"is_primary_key": true
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"cltype": "text"
|
||||
},
|
||||
{
|
||||
"name": "salary",
|
||||
"cltype": "bigint"
|
||||
}
|
||||
],
|
||||
"is_partitioned": false,
|
||||
"schema": "public",
|
||||
"spcname": "pg_default"
|
||||
},
|
||||
"store_object_id": true
|
||||
},
|
||||
{
|
||||
"type": "create",
|
||||
"name": "Create Publication",
|
||||
"endpoint": "NODE-publication.obj",
|
||||
"sql_endpoint": "NODE-publication.sql_id",
|
||||
"msql_endpoint": "NODE-publication.msql",
|
||||
"data": {
|
||||
"name": "test_publication_create",
|
||||
"evnt_insert": true,
|
||||
"evnt_update": true,
|
||||
"evnt_delete": false,
|
||||
"evnt_truncate": false,
|
||||
"pubowner": "postgres",
|
||||
"all_table": true,
|
||||
"pubtable": ""
|
||||
},
|
||||
"expected_sql_file": "create_publication.sql",
|
||||
"expected_msql_file": "create_publication_msql.sql"
|
||||
},
|
||||
{
|
||||
"type": "alter",
|
||||
"name": "Alter Publication name",
|
||||
"endpoint": "NODE-publication.obj_id",
|
||||
"sql_endpoint": "NODE-publication.sql_id",
|
||||
"data": {
|
||||
"name": "alterd_publication"
|
||||
},
|
||||
"expected_sql_file": "alter_publication.sql"
|
||||
},
|
||||
{
|
||||
"type": "alter",
|
||||
"name": "Alter Publication event",
|
||||
"endpoint": "NODE-publication.obj_id",
|
||||
"sql_endpoint": "NODE-publication.sql_id",
|
||||
"data": {
|
||||
"evnt_delete": true
|
||||
},
|
||||
"expected_sql_file": "alter_publication_event.sql"
|
||||
},
|
||||
{
|
||||
"type": "delete",
|
||||
"name": "Drop publication",
|
||||
"endpoint": "NODE-publication.delete_id",
|
||||
"data": {
|
||||
"name": "alterd_publication_event"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
@@ -0,0 +1,16 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
|
||||
|
||||
class RulesTestGenerator(BaseTestGenerator):
|
||||
|
||||
def runTest(self):
|
||||
return []
|
@@ -0,0 +1,397 @@
|
||||
{
|
||||
"add_publication": [
|
||||
{
|
||||
"name": "Create publication with insert and update",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"test_data": {
|
||||
"name": "PLACEHOLDER",
|
||||
"evnt_insert": true,
|
||||
"evnt_update": true,
|
||||
"evnt_delete": false,
|
||||
"evnt_truncate": false,
|
||||
"pubowner": "postgres",
|
||||
"all_table": true,
|
||||
"pubtable": ""
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create publication for few tables",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"few_tables": true,
|
||||
"test_data": {
|
||||
"name": "PLACEHOLDER",
|
||||
"evnt_insert": true,
|
||||
"evnt_update": true,
|
||||
"evnt_delete": false,
|
||||
"evnt_truncate": false,
|
||||
"pubowner": "postgres",
|
||||
"all_table": false,
|
||||
"pubtable": "PLACE_HOLDER"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create a publication without name",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": false,
|
||||
"without_name": true,
|
||||
"test_data": {
|
||||
"name": "PLACEHOLDER",
|
||||
"evnt_insert": true,
|
||||
"evnt_update": true,
|
||||
"evnt_delete": false,
|
||||
"evnt_truncate": false,
|
||||
"pubowner": "postgres",
|
||||
"all_table": true,
|
||||
"pubtable": ""
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 410
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while adding a publication",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": false,
|
||||
"error_creating_publication": true,
|
||||
"test_data": {
|
||||
"name": "PLACEHOLDER",
|
||||
"evnt_insert": true,
|
||||
"evnt_update": true,
|
||||
"evnt_delete": false,
|
||||
"evnt_truncate": false,
|
||||
"pubowner": "postgres",
|
||||
"all_table": true,
|
||||
"pubtable": ""
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error ')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Exception while adding a publication",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": false,
|
||||
"test_data": {
|
||||
"name": "PLACEHOLDER",
|
||||
"evnt_insert": true,
|
||||
"evnt_update": true,
|
||||
"evnt_delete": false,
|
||||
"evnt_truncate": false,
|
||||
"pubowner": "postgres",
|
||||
"all_table": true,
|
||||
"pubtable": ""
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(True, True)(False, 'Mocked Internal Server Error ')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
}
|
||||
],
|
||||
"get_publication": [
|
||||
{
|
||||
"name": "Get a publication URL",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a publication properties",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get a publication properties under database nodes",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"database_nodes": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a publication properties under database nodes",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": false,
|
||||
"database_nodes": true,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get a publication Node",
|
||||
"url": "/browser/publication/nodes/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get a publication Node dependants",
|
||||
"url": "/browser/publication/dependent/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get a publication Node dependency",
|
||||
"url": "/browser/publication/dependency/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching the publication under the database nodes using wrong database id",
|
||||
"url": "/browser/publication/nodes/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get all the publication under the database nodes",
|
||||
"url": "/browser/publication/nodes/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"database_nodes": true,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get all the publication under the database nodes using wrong database id",
|
||||
"url": "/browser/publication/nodes/",
|
||||
"is_positive_test": true,
|
||||
"incorrect_database_id": true,
|
||||
"database_nodes": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching all the publication under the database nodes using wrong database id",
|
||||
"url": "/browser/publication/nodes/",
|
||||
"is_positive_test": false,
|
||||
"database_nodes": true,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
}
|
||||
],
|
||||
"delete_publication": [
|
||||
{
|
||||
"name": "Delete a publication URL",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while deleting the publication",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
}
|
||||
],
|
||||
"update_publication": [
|
||||
{
|
||||
"name": "update a publication name",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"update_name": true,
|
||||
"test_data": {
|
||||
"name": "PLACE_HOLDER",
|
||||
"id": "PLACE_HOLDER"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "update a publication event insert",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"owner_publication": true,
|
||||
"test_data": {
|
||||
"id": "PLACE_HOLDER",
|
||||
"evnt_insert": "PLACEHOLDER"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "update a publication event delete",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"owner_publication": true,
|
||||
"test_data": {
|
||||
"id": "PLACE_HOLDER",
|
||||
"evnt_delete": "PLACEHOLDER"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a publication to update",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"test_data": {
|
||||
"name": "PLACE_HOLDER",
|
||||
"id": "PLACE_HOLDER"
|
||||
},
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a publication to update using wrong publication id",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"wrong_publication_id": true,
|
||||
"mocking_required": false,
|
||||
"test_data": {
|
||||
"id": "PLACE_HOLDER"
|
||||
},
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
}
|
||||
],
|
||||
"delete_multiple_publication": [
|
||||
{
|
||||
"name": "Delete multiple publication",
|
||||
"url": "/browser/publication/obj/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
}
|
||||
],
|
||||
"sql_publication": [
|
||||
{
|
||||
"name": "Fetch the publication SQL",
|
||||
"url": "/browser/publication/sql/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a publication SQL",
|
||||
"url": "/browser/publication/sql/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Fetching a publication mSQL",
|
||||
"url": "/browser/publication/msql/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
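Each scenario above shares the same shape: a descriptive name, the endpoint under test, a positive/negative flag, optional mock details, and the expected HTTP status. The standalone sketch below (illustrative only, not part of the patch) shows how one such entry reads once loaded; the real wiring into test-case attributes is done by test_utils.generate_scenarios in the test files that follow.

import json

scenario = json.loads("""
{
  "name": "Get a publication URL",
  "url": "/browser/publication/obj/",
  "is_positive_test": true,
  "mocking_required": false,
  "mock_data": {},
  "expected_data": {"status_code": 200}
}
""")

# generate_scenarios turns each key into an attribute on the generated test
# case, so the final check is essentially:
#   self.assertEqual(response.status_code,
#                    scenario["expected_data"]["status_code"])
print(scenario["name"], "->", scenario["expected_data"]["status_code"])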
@ -0,0 +1,95 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as publication_utils
|
||||
|
||||
|
||||
class PublicationsAddTestCase(BaseTestGenerator):
|
||||
"""This class will add new publication"""
|
||||
scenarios = utils.generate_scenarios('add_publication',
|
||||
publication_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to add a publication.")
|
||||
|
||||
if self.is_positive_test and hasattr(self, 'few_tables'):
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils. \
|
||||
create_table(self.server, self.db_name, self.schema_name,
|
||||
self.table_name)
|
||||
|
||||
self.test_data['pubtable'] = publication_utils.get_tables(self)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will publication."""
|
||||
self.test_data['name'] = \
|
||||
"test_publication_add_%s" % (str(uuid.uuid4())[1:8])
|
||||
|
||||
data = self.test_data
|
||||
if self.is_positive_test:
|
||||
response = self.create_publication()
|
||||
else:
|
||||
if hasattr(self, 'without_name'):
|
||||
del data["name"]
|
||||
response = self.create_publication()
|
||||
elif hasattr(self, 'error_creating_publication'):
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.create_publication()
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
side_effect=self.mock_data["return_value"]):
|
||||
response = self.create_publication()
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def create_publication(self):
|
||||
return self.tester.post(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' +
|
||||
str(self.server_id) + '/' + str(
|
||||
self.db_id) + '/',
|
||||
data=json.dumps(self.test_data),
|
||||
content_type='html/json')
|
||||
|
||||
def tearDown(self):
|
||||
if not hasattr(self, 'without_name'):
|
||||
publication_utils.delete_publication(self.server, self.db_name,
|
||||
self.test_data['name'])
|
||||
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
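For readers unfamiliar with the pattern in runTest() above: the mock_data entries from the JSON are turned into unittest.mock patches, and the "return_value" string is eval'd into the (status, result) tuple the patched driver method should report. A minimal standalone sketch of the same idea follows; the DummyConnection class is a stand-in for pgAdmin's real Connection, so this is illustrative only.

from unittest.mock import patch


class DummyConnection:
    """Stand-in for pgadmin.utils.driver.psycopg2.connection.Connection."""

    def execute_dict(self, sql):
        return True, {"rows": []}


mock_return = "(False, 'Mocked Internal Server Error')"

# Same idea as patch(self.mock_data["function_name"],
#                    return_value=eval(self.mock_data["return_value"]))
with patch.object(DummyConnection, "execute_dict",
                  return_value=eval(mock_return)):
    status, res = DummyConnection().execute_dict("SELECT 1")

assert status is False and res == 'Mocked Internal Server Error'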
@ -0,0 +1,94 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as publication_utils
|
||||
|
||||
|
||||
class PublicationDeleteTestCase(BaseTestGenerator):
|
||||
"""This class will delete publication."""
|
||||
scenarios = utils.generate_scenarios('delete_publication',
|
||||
publication_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to delete publication.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to delete publication.")
|
||||
self.publication_name = "test_publication_delete_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
|
||||
self.publication_id = \
|
||||
publication_utils.create_publication(self.server,
|
||||
self.db_name,
|
||||
self.publication_name)
|
||||
|
||||
def delete_publication(self):
|
||||
return self.tester.delete(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' +
|
||||
str(self.server_id) + '/' + str(self.db_id) +
|
||||
'/' + str(self.publication_id),
|
||||
follow_redirects=True)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will delete publication"""
|
||||
publication_response = publication_utils. \
|
||||
verify_publication(self.server,
|
||||
self.db_name,
|
||||
self.publication_name)
|
||||
if not publication_response:
|
||||
raise Exception("Could not find the publication to delete.")
|
||||
|
||||
if self.is_positive_test:
|
||||
if hasattr(self, "invalid_publication_id"):
|
||||
self.publication_id = 9999
|
||||
response = self.delete_publication()
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.delete_publication()
|
||||
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
if not self.is_positive_test or hasattr(self,
|
||||
'invalid_publication_id'):
|
||||
publication_utils.delete_publication(self.server, self.db_name,
|
||||
self.publication_name)
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,103 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
import json
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as publication_utils
|
||||
|
||||
|
||||
class PublicationDeleteTestCases(BaseTestGenerator):
|
||||
"""This class will delete publication."""
|
||||
|
||||
scenarios = utils.generate_scenarios('delete_multiple_publication',
|
||||
publication_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to delete publication.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to delete publication.")
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.publication_name = "test_publication_delete_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.publication_name_1 = "test_publication_delete_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.publication_ids = [
|
||||
publication_utils.create_publication(self.server, self.db_name,
|
||||
self.publication_name),
|
||||
publication_utils.create_publication(self.server, self.db_name,
|
||||
self.publication_name_1),
|
||||
]
|
||||
|
||||
def delete_multiple_publication(self, data):
|
||||
return self.tester.delete(
|
||||
"{0}{1}/{2}/{3}/".format(self.url, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id
|
||||
),
|
||||
follow_redirects=True,
|
||||
data=json.dumps(data),
|
||||
content_type='html/json'
|
||||
)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will delete publication."""
|
||||
rule_response = publication_utils. \
|
||||
verify_publication(self.server,
|
||||
self.db_name,
|
||||
self.publication_name)
|
||||
if not rule_response:
|
||||
raise Exception("Could not find the publication to delete.")
|
||||
|
||||
rule_response = publication_utils. \
|
||||
verify_publication(self.server,
|
||||
self.db_name,
|
||||
self.publication_name_1)
|
||||
if not rule_response:
|
||||
raise Exception("Could not find the publication to delete.")
|
||||
|
||||
data = {'ids': self.publication_ids}
|
||||
response = self.delete_multiple_publication(data)
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,100 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as publication_utils
|
||||
|
||||
|
||||
class PublicationGetTestCase(BaseTestGenerator):
|
||||
"""This class will fetch the publication under table node."""
|
||||
scenarios = utils.generate_scenarios('get_publication',
|
||||
publication_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to delete publication.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to fetch the publication.")
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.publication_name = "test_publication_get_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.publication_id = publication_utils. \
|
||||
create_publication(self.server,
|
||||
self.db_name,
|
||||
self.publication_name)
|
||||
|
||||
def get_publication(self):
|
||||
return self.tester.get(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' + str(
|
||||
self.server_id) + '/' +
|
||||
str(self.db_id) + '/' + str(self.publication_id),
|
||||
content_type='html/json')
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch the publication."""
|
||||
|
||||
if self.is_positive_test:
|
||||
if hasattr(self, "database_nodes"):
|
||||
self.publication_id = ''
|
||||
response = self.get_publication()
|
||||
else:
|
||||
response = self.get_publication()
|
||||
else:
|
||||
if hasattr(self, "database_nodes"):
|
||||
self.publication_id = ''
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.get_publication()
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.get_publication()
|
||||
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
publication_utils.delete_publication(self.server, self.db_name,
|
||||
self.publication_name)
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,127 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as publication_utils
|
||||
|
||||
|
||||
class PublicationUpdateTestCase(BaseTestGenerator):
|
||||
"""This class will update the publication."""
|
||||
scenarios = utils.generate_scenarios('update_publication',
|
||||
publication_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to delete publication.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to update the publication.")
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.publication_name = "test_publication_update_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.publication_id = \
|
||||
publication_utils.create_publication(self.server, self.db_name,
|
||||
self.publication_name)
|
||||
|
||||
def update_publication(self, data):
|
||||
return self.tester.put(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' +
|
||||
str(self.server_id) + '/' + str(
|
||||
self.db_id) +
|
||||
'/' + str(self.publication_id),
|
||||
data=json.dumps(data),
|
||||
follow_redirects=True)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will update the publication."""
|
||||
|
||||
publication_name = publication_utils. \
|
||||
verify_publication(self.server,
|
||||
self.db_name,
|
||||
self.publication_name)
|
||||
if hasattr(self, "update_name"):
|
||||
self.test_data['name'] = "test_publication_update_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
else:
|
||||
self.test_data['name'] = self.publication_name
|
||||
self.test_data['id'] = self.publication_id
|
||||
|
||||
if not publication_name:
|
||||
raise Exception("Could not find the publication to update.")
|
||||
|
||||
if self.is_positive_test:
|
||||
if hasattr(self, "wrong_publication_id"):
|
||||
self.publication_id = 9999
|
||||
if hasattr(self, "plid_none"):
|
||||
self.publication_id = ''
|
||||
response = self.update_publication(self.test_data)
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
if hasattr(self, "wrong_publication_id"):
|
||||
self.publication_id = 9999
|
||||
response = self.update_publication(self.test_data)
|
||||
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
connection = utils.get_db_connection(self.server['db'],
|
||||
self.server['username'],
|
||||
self.server['db_password'],
|
||||
self.server['host'],
|
||||
self.server['port'],
|
||||
self.server['sslmode'])
|
||||
|
||||
publication_utils.delete_publication(self.server, self.db_name,
|
||||
self.test_data['name'])
|
||||
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,88 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
import json
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as publication_utils
|
||||
|
||||
|
||||
class PublicationGetTestCase(BaseTestGenerator):
|
||||
"""This class will fetch the publication under table node."""
|
||||
scenarios = utils.generate_scenarios('sql_publication',
|
||||
publication_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to delete publication.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to fetch the publication SQL.")
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.publication_name = "test_publication_delete_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.publication_id = \
|
||||
publication_utils.create_publication(self.server, self.db_name,
|
||||
self.publication_name)
|
||||
|
||||
def get_sql(self):
|
||||
return self.tester.get(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' + str(
|
||||
self.server_id) + '/' +
|
||||
str(self.db_id) + '/' + str(self.publication_id),
|
||||
content_type='html/json')
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch the publication under table node."""
|
||||
|
||||
if self.is_positive_test:
|
||||
response = self.get_sql()
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.get_sql()
|
||||
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
publication_utils.delete_publication(self.server, self.db_name,
|
||||
self.publication_name)
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,152 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
|
||||
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
|
||||
with open(CURRENT_PATH + "/publication_test_data.json") as data_file:
|
||||
test_cases = json.load(data_file)
|
||||
|
||||
|
||||
def get_tables(self):
|
||||
tables = self.tester.get(
|
||||
'/browser/publication/get_tables/' + str(
|
||||
utils.SERVER_GROUP) + '/' + str(
|
||||
self.server_id) + '/' +
|
||||
str(self.db_id) + '/',
|
||||
content_type='html/json')
|
||||
return json.dumps([tables.json['data'][1]['value']])
|
||||
|
||||
|
||||
def create_publication_api(self):
|
||||
return self.tester.post(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' +
|
||||
str(self.server_id) + '/' + str(
|
||||
self.db_id) + '/',
|
||||
data=json.dumps(self.test_data),
|
||||
content_type='html/json')
|
||||
|
||||
|
||||
def create_publication(server, db_name, publication_name):
|
||||
"""
|
||||
This function creates a publication (FOR ALL TABLES) in the given database.
|
||||
:param server: server details
|
||||
:type server: dict
|
||||
:param db_name: database name
|
||||
:type db_name: str
|
||||
:param publication_name: publication name
|
||||
:type publication_name: str
|
||||
:return publication_id: publication id
|
||||
:rtype: int
|
||||
"""
|
||||
try:
|
||||
connection = utils.get_db_connection(db_name,
|
||||
server['username'],
|
||||
server['db_password'],
|
||||
server['host'],
|
||||
server['port'],
|
||||
server['sslmode'])
|
||||
old_isolation_level = connection.isolation_level
|
||||
connection.set_isolation_level(0)
|
||||
pg_cursor = connection.cursor()
|
||||
query = "CREATE publication %s FOR ALL TABLES" % \
|
||||
(publication_name)
|
||||
pg_cursor.execute(query)
|
||||
connection.set_isolation_level(old_isolation_level)
|
||||
connection.commit()
|
||||
# Get the oid of the newly added publication
|
||||
pg_cursor.execute("select oid from pg_publication pub where "
|
||||
"pub.pubname='%s'" %
|
||||
publication_name)
|
||||
publication = pg_cursor.fetchone()
|
||||
publication_id = ''
|
||||
if publication:
|
||||
publication_id = publication[0]
|
||||
connection.close()
|
||||
return publication_id
|
||||
except Exception:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
raise
|
||||
|
||||
|
||||
def verify_publication(server, db_name, publication_name):
|
||||
"""
|
||||
This function verifies publication exist in database or not.
|
||||
:param server: server details
|
||||
:type server: dict
|
||||
:param db_name: database name
|
||||
:type db_name: str
|
||||
:param publication_name: publication name
|
||||
:type publication_name: str
|
||||
:return publication: publication record from database
|
||||
:rtype: tuple
|
||||
"""
|
||||
try:
|
||||
connection = utils.get_db_connection(db_name,
|
||||
server['username'],
|
||||
server['db_password'],
|
||||
server['host'],
|
||||
server['port'],
|
||||
server['sslmode'])
|
||||
pg_cursor = connection.cursor()
|
||||
pg_cursor.execute("select * from pg_publication pub "
|
||||
"where pub.pubname='%s'" %
|
||||
publication_name)
|
||||
publication = pg_cursor.fetchone()
|
||||
connection.close()
|
||||
return publication
|
||||
except Exception:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
raise
|
||||
|
||||
|
||||
def delete_publication(server, db_name, publication_name):
|
||||
"""
|
||||
This function is used to delete an existing publication from the database.
|
||||
|
||||
:param db_name: database name
|
||||
:type db_name: str
|
||||
:param server: server details
|
||||
:type server: dict
|
||||
:param publication_name: publication name
|
||||
:type publication_name: str
|
||||
:return: None
|
||||
"""
|
||||
|
||||
try:
|
||||
connection = utils.get_db_connection(db_name,
|
||||
server['username'],
|
||||
server['db_password'],
|
||||
server['host'],
|
||||
server['port'],
|
||||
server['sslmode'])
|
||||
pg_cursor = connection.cursor()
|
||||
|
||||
pg_cursor.execute("select * from pg_publication pub where "
|
||||
"pub.pubname='%s'" %
|
||||
publication_name)
|
||||
publication_count = pg_cursor.fetchone()
|
||||
if publication_count:
|
||||
old_isolation_level = connection.isolation_level
|
||||
connection.set_isolation_level(0)
|
||||
pg_cursor = connection.cursor()
|
||||
query = "DROP publication %s" % publication_name
|
||||
pg_cursor.execute(query)
|
||||
connection.set_isolation_level(old_isolation_level)
|
||||
connection.commit()
|
||||
connection.close()
|
||||
except Exception:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
raise
|
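Taken together, the helpers above give the test cases a create/verify/cleanup cycle built on raw SQL against pg_publication. A minimal sketch of how they are typically combined follows; it is illustrative only (in the generated tests this happens inside setUp() and tearDown(), and the server dict and database name come from the regression fixtures).

import uuid

from . import utils as publication_utils  # the module shown above


def exercise_publication_lifecycle(server, db_name):
    # Create a uniquely named publication, check it exists, then drop it.
    name = "test_publication_%s" % (str(uuid.uuid4())[1:8])
    pub_id = publication_utils.create_publication(server, db_name, name)
    try:
        assert publication_utils.verify_publication(server, db_name, name), \
            "publication was not created"
        return pub_id
    finally:
        publication_utils.delete_publication(server, db_name, name)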
@ -517,6 +517,11 @@ define('pgadmin.node.table', [
|
||||
return true;
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'replica_identity', label: gettext('Replica Identity'),
|
||||
group: gettext('advanced'), type: 'text', mode: ['edit', 'properties'],
|
||||
},
|
||||
|
||||
{
|
||||
id: 'advanced', label: gettext('Advanced'), type: 'group',
|
||||
visible: ShowAdvancedTab.show_advanced_tab,
|
||||
|
@ -4,6 +4,12 @@ SELECT rel.oid, rel.relname AS name, rel.reltablespace AS spcoid,rel.relacl AS r
|
||||
JOIN pg_tablespace sp ON dtb.dattablespace=sp.oid
|
||||
WHERE dtb.oid = {{ did }}::oid)
|
||||
END) as spcname,
|
||||
(CASE rel.relreplident
|
||||
WHEN 'd' THEN 'default'
|
||||
WHEN 'n' THEN 'nothing'
|
||||
WHEN 'f' THEN 'full'
|
||||
WHEN 'i' THEN 'index'
|
||||
END) as replica_identity,
|
||||
(select nspname FROM pg_namespace WHERE oid = {{scid}}::oid ) as schema,
|
||||
pg_get_userbyid(rel.relowner) AS relowner, rel.relhasoids, rel.relkind,
|
||||
(CASE WHEN rel.relkind = 'p' THEN true ELSE false END) AS is_partitioned,
|
||||
|
@ -4,6 +4,12 @@ SELECT rel.oid, rel.relname AS name, rel.reltablespace AS spcoid,rel.relacl AS r
|
||||
JOIN pg_tablespace sp ON dtb.dattablespace=sp.oid
|
||||
WHERE dtb.oid = {{ did }}::oid)
|
||||
END) as spcname,
|
||||
(CASE rel.relreplident
|
||||
WHEN 'd' THEN 'default'
|
||||
WHEN 'n' THEN 'nothing'
|
||||
WHEN 'f' THEN 'full'
|
||||
WHEN 'i' THEN 'index'
|
||||
END) as replica_identity,
|
||||
(select nspname FROM pg_namespace WHERE oid = {{scid}}::oid ) as schema,
|
||||
pg_get_userbyid(rel.relowner) AS relowner, rel.relhasoids, rel.relkind,
|
||||
(CASE WHEN rel.relkind = 'p' THEN true ELSE false END) AS is_partitioned,
|
||||
|
@ -279,3 +279,15 @@ ALTER TABLE {{conn|qtIdent(data.schema, data.name)}}
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
{#####################################################}
|
||||
{## Change replica identity ##}
|
||||
{#####################################################}
|
||||
{% if data.replica_identity and data.replica_identity != o_data.replica_identity %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}} REPLICA IDENTITY {{data.replica_identity }};
|
||||
{% endif %}
|
||||
{#####################################################}
|
||||
{## Change replica identity ##}
|
||||
{#####################################################}
|
||||
{% if data.replica_identity and data.replica_identity != o_data.replica_identity %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}} REPLICA IDENTITY {{data.replica_identity }};
|
||||
{% endif %}
|
||||
|
@ -4,6 +4,12 @@ SELECT rel.oid, rel.relname AS name, rel.reltablespace AS spcoid,rel.relacl AS r
|
||||
JOIN pg_tablespace sp ON dtb.dattablespace=sp.oid
|
||||
WHERE dtb.oid = {{ did }}::oid)
|
||||
END) as spcname,
|
||||
(CASE rel.relreplident
|
||||
WHEN 'd' THEN 'default'
|
||||
WHEN 'n' THEN 'nothing'
|
||||
WHEN 'f' THEN 'full'
|
||||
WHEN 'i' THEN 'index'
|
||||
END) as replica_identity,
|
||||
(select nspname FROM pg_namespace WHERE oid = {{scid}}::oid ) as schema,
|
||||
pg_get_userbyid(rel.relowner) AS relowner, rel.relkind,
|
||||
(CASE WHEN rel.relkind = 'p' THEN true ELSE false END) AS is_partitioned,
|
||||
|
@ -272,3 +272,15 @@ COMMENT ON TABLE {{conn|qtIdent(data.schema, data.name)}}
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
{#####################################################}
|
||||
{## Change replica identity ##}
|
||||
{#####################################################}
|
||||
{% if data.replica_identity and data.replica_identity != o_data.replica_identity %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}} REPLICA IDENTITY {{data.replica_identity }};
|
||||
{% endif %}
|
||||
{#####################################################}
|
||||
{## Change replica identity ##}
|
||||
{#####################################################}
|
||||
{% if data.replica_identity and data.replica_identity != o_data.replica_identity %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}} REPLICA IDENTITY {{data.replica_identity }};
|
||||
{% endif %}
|
||||
|
@ -256,3 +256,17 @@ COMMENT ON TABLE {{conn|qtIdent(data.schema, data.name)}}
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
{#####################################################}
|
||||
{## Change replica identity ##}
|
||||
{#####################################################}
|
||||
{% if data.replica_identity and data.replica_identity != o_data.replica_identity %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}} REPLICA IDENTITY {{data.replica_identity }};
|
||||
{% endif %}
|
||||
|
||||
{#####################################################}
|
||||
{## Change replica identity ##}
|
||||
{#####################################################}
|
||||
{% if data.replica_identity and data.replica_identity != o_data.replica_identity %}
|
||||
ALTER TABLE {{conn|qtIdent(data.schema, data.name)}} REPLICA IDENTITY {{data.replica_identity }};
|
||||
{% endif %}
|
||||
|
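To make the template additions above concrete, the sketch below renders an equivalent Jinja2 fragment with a simplified stand-in for pgAdmin's qtIdent filter (the real filter quotes identifiers through the driver), showing the ALTER TABLE statement emitted when the replica identity changes. The schema, table, and values here are made up for illustration.

from jinja2 import Environment

TEMPLATE = (
    "{% if data.replica_identity and "
    "data.replica_identity != o_data.replica_identity %}"
    "ALTER TABLE {{ conn | qtIdent(data.schema, data.name) }}"
    " REPLICA IDENTITY {{ data.replica_identity }};"
    "{% endif %}"
)

env = Environment()
# Simplified qtIdent: double-quote each part and join with dots.
env.filters['qtIdent'] = lambda conn, *parts: ".".join(
    '"%s"' % p for p in parts)

print(env.from_string(TEMPLATE).render(
    conn=None,
    data={'schema': 'public', 'name': 'orders', 'replica_identity': 'full'},
    o_data={'replica_identity': 'default'},
))
# Prints: ALTER TABLE "public"."orders" REPLICA IDENTITY full;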
@ -99,6 +99,18 @@ define('pgadmin.node.database', [
|
||||
|
||||
return server.connected && server.user.can_create_db;
|
||||
},
|
||||
canCreate: function(itemData, item) {
|
||||
var treeData = this.getTreeNodeHierarchy(item),
|
||||
server = treeData['server'];
|
||||
|
||||
// If the server version is less than 10 then do not allow the 'create' menu
|
||||
if (server && server.version < 100000)
|
||||
return false;
|
||||
|
||||
// by default we want to allow create menu
|
||||
return true;
|
||||
},
|
||||
|
||||
is_not_connected: function(node) {
|
||||
return (node && node.connected != true && node.allowConn == true);
|
||||
},
|
||||
|
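A quick note on the 100000 threshold used in the browser code above and in the min_ver settings below: from PostgreSQL 10 onward, server_version_num is reported as major * 10000 + minor, so 100000 is exactly version 10.0, the first release with logical replication publications and subscriptions. A small check, for illustration only:

def supports_logical_replication(server_version_num: int) -> bool:
    # PostgreSQL 10 is reported as 100000; anything lower predates
    # logical replication publications/subscriptions.
    return server_version_num >= 100000


assert supports_logical_replication(100002)        # 10.2
assert supports_logical_replication(130001)        # 13.1
assert not supports_logical_replication(90624)     # 9.6.24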
@ -0,0 +1,860 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
"""Implements Subscription Node"""
|
||||
|
||||
import simplejson as json
|
||||
from functools import wraps
|
||||
|
||||
import pgadmin.browser.server_groups.servers.databases as databases
|
||||
from flask import render_template, request, jsonify
|
||||
from flask_babelex import gettext
|
||||
from pgadmin.browser.collection import CollectionNodeModule
|
||||
from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
|
||||
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
|
||||
import psycopg2
|
||||
from pgadmin.utils import get_complete_file_path
|
||||
|
||||
|
||||
class SubscriptionModule(CollectionNodeModule):
|
||||
"""
|
||||
class SubscriptionModule(CollectionNodeModule)
|
||||
|
||||
A module class for Subscription node derived from CollectionNodeModule.
|
||||
|
||||
Methods:
|
||||
-------
|
||||
* __init__(*args, **kwargs)
|
||||
- Method is used to initialize the SubscriptionModule and its
|
||||
base module.
|
||||
|
||||
* get_nodes(gid, sid, did)
|
||||
- Method is used to generate the browser collection node.
|
||||
|
||||
* node_inode()
|
||||
- Method is overridden from its base class to make the node a leaf node.
|
||||
|
||||
* script_load()
|
||||
- Load the module script for subscription, when any of the database nodes
|
||||
is initialized.
|
||||
"""
|
||||
|
||||
_NODE_TYPE = 'subscription'
|
||||
_COLLECTION_LABEL = gettext("Subscriptions")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""
|
||||
Method is used to initialize the SubscriptionModule and its
|
||||
base module.
|
||||
|
||||
Args:
|
||||
*args:
|
||||
**kwargs:
|
||||
"""
|
||||
super(SubscriptionModule, self).__init__(*args, **kwargs)
|
||||
self.min_ver = 100000
|
||||
self.max_ver = None
|
||||
|
||||
def get_nodes(self, gid, sid, did):
|
||||
"""
|
||||
Method is used to generate the browser collection node
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database Id
|
||||
"""
|
||||
yield self.generate_browser_collection_node(did)
|
||||
|
||||
@property
|
||||
def node_inode(self):
|
||||
"""
|
||||
Override this property to make the node a leaf node.
|
||||
|
||||
Returns: False as this is the leaf node
|
||||
"""
|
||||
return False
|
||||
|
||||
@property
|
||||
def script_load(self):
|
||||
"""
|
||||
Load the module script for subscription, when any of the database nodes
|
||||
are initialized.
|
||||
|
||||
Returns: node type of the server module.
|
||||
"""
|
||||
return databases.DatabaseModule.node_type
|
||||
|
||||
@property
|
||||
def module_use_template_javascript(self):
|
||||
"""
|
||||
Returns whether Jinja2 template is used for generating the javascript
|
||||
module.
|
||||
"""
|
||||
return False
|
||||
|
||||
|
||||
blueprint = SubscriptionModule(__name__)
|
||||
|
||||
|
||||
class SubscriptionView(PGChildNodeView, SchemaDiffObjectCompare):
|
||||
"""
|
||||
class SubscriptionView(PGChildNodeView)
|
||||
|
||||
A view class for Subscription node derived from PGChildNodeView.
|
||||
This class is responsible for view operations such as updating the
|
||||
subscription node, showing its properties, and showing its SQL in
|
||||
the SQL pane.
|
||||
|
||||
Methods:
|
||||
-------
|
||||
* __init__(**kwargs)
|
||||
- Method is used to initialize the SubscriptionView and its base view.
|
||||
|
||||
* check_precondition()
|
||||
- This function behaves as a decorator which checks the
|
||||
database connection before running the view; it also attaches
|
||||
manager, conn & template_path properties to self
|
||||
|
||||
* list()
|
||||
- This function is used to list all the subscription nodes within that
|
||||
collection.
|
||||
|
||||
* nodes()
|
||||
- This function is used to create all the child nodes within the
|
||||
collection. Here it will create all the subscription nodes.
|
||||
|
||||
* properties(gid, sid, did, subid)
|
||||
- This function will show the properties of the selected
|
||||
subscription node
|
||||
|
||||
* update(gid, sid, did, subid)
|
||||
- This function will update the data for the selected subscription node
|
||||
|
||||
* create(gid, sid, did)
|
||||
- This function will create the new subscription node
|
||||
|
||||
* delete(gid, sid, did, subid)
|
||||
- This function will delete the selected subscription node
|
||||
|
||||
* msql(gid, sid, did, subid)
|
||||
- This function is used to return modified SQL for the selected
|
||||
subscription node
|
||||
|
||||
* get_sql(data, subid)
|
||||
- This function will generate sql from model data
|
||||
|
||||
* get_publications(gid, sid, did)
|
||||
- This function returns the publications list
|
||||
|
||||
* get_templates(gid, sid, did)
|
||||
- This function returns subscription templates.
|
||||
|
||||
* sql(gid, sid, did, subid):
|
||||
- This function will generate sql to show it in sql pane for the
|
||||
selected subscription node.
|
||||
|
||||
* dependents(gid, sid, did, subid):
|
||||
- This function gets the dependents and returns an ajax response for the
|
||||
subscription node.
|
||||
|
||||
* dependencies(self, gid, sid, did, subid):
|
||||
- This function gets the dependencies and returns an ajax response for the
|
||||
subscription node.
|
||||
"""
|
||||
|
||||
_NOT_FOUND_PUB_INFORMATION = \
|
||||
gettext("Could not find the subscription information.")
|
||||
node_type = blueprint.node_type
|
||||
|
||||
parent_ids = [
|
||||
{'type': 'int', 'id': 'gid'},
|
||||
{'type': 'int', 'id': 'sid'},
|
||||
{'type': 'int', 'id': 'did'}
|
||||
]
|
||||
ids = [
|
||||
{'type': 'int', 'id': 'subid'}
|
||||
]
|
||||
|
||||
operations = dict({
|
||||
'obj': [
|
||||
{'get': 'properties', 'delete': 'delete', 'put': 'update'},
|
||||
{'get': 'list', 'post': 'create', 'delete': 'delete'}
|
||||
],
|
||||
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
|
||||
'sql': [{'get': 'sql'}],
|
||||
'msql': [{'get': 'msql'}, {'get': 'msql'}],
|
||||
'stats': [{'get': 'statistics'}],
|
||||
'dependency': [{'get': 'dependencies'}],
|
||||
'dependent': [{'get': 'dependents'}],
|
||||
'get_publications': [{}, {'get': 'get_publications'}],
|
||||
'delete': [{'delete': 'delete'}, {'delete': 'delete'}]
|
||||
})
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Method is used to initialize the SubscriptionView and its base view.
|
||||
Initialize all the variables create/used dynamically like conn,
|
||||
template_path.
|
||||
|
||||
Args:
|
||||
**kwargs:
|
||||
"""
|
||||
self.conn = None
|
||||
self.template_path = None
|
||||
self.manager = None
|
||||
|
||||
super(SubscriptionView, self).__init__(**kwargs)
|
||||
|
||||
def check_precondition(f):
|
||||
"""
|
||||
This function will behave as a decorator which will check the
|
||||
database connection before running the view. It also attaches
|
||||
manager, conn & template_path properties to self
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold self & kwargs will hold gid,sid,did
|
||||
self = args[0]
|
||||
self.driver = get_driver(PG_DEFAULT_DRIVER)
|
||||
self.manager = self.driver.connection_manager(kwargs['sid'])
|
||||
self.conn = self.manager.connection(did=kwargs['did'])
|
||||
self.datlastsysoid = self.manager.db_info[kwargs['did']][
|
||||
'datlastsysoid'] if self.manager.db_info is not None \
|
||||
and kwargs['did'] in self.manager.db_info else 0
|
||||
|
||||
# Set the template path for the SQL scripts
|
||||
self.template_path = (
|
||||
"subscriptions/sql/#gpdb#{0}#".format(self.manager.version) if
|
||||
self.manager.server_type == 'gpdb' else
|
||||
"subscriptions/sql/#{0}#".format(self.manager.version)
|
||||
)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
return wrap
|
||||
|
||||
@check_precondition
|
||||
def list(self, gid, sid, did):
|
||||
"""
|
||||
This function is used to list all the subscription nodes within that
|
||||
collection.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._PROPERTIES_SQL]), did=did)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return ajax_response(
|
||||
response=res['rows'],
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def nodes(self, gid, sid, did):
|
||||
"""
|
||||
This function is used to create all the child nodes within the
|
||||
collection. Here it will create all the subscription nodes.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
"""
|
||||
res = []
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'nodes.sql']), did=did)
|
||||
status, result = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=result)
|
||||
|
||||
for row in result['rows']:
|
||||
res.append(
|
||||
self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
row['name'],
|
||||
icon="icon-subscription"
|
||||
))
|
||||
|
||||
return make_json_response(
|
||||
data=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, subid):
|
||||
"""
|
||||
This function will fetch properties of the subscription nodes.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._PROPERTIES_SQL]),
|
||||
subid=subid)
|
||||
status, result = self.conn.execute_2darray(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=result)
|
||||
|
||||
for row in result['rows']:
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
row['oid'],
|
||||
did,
|
||||
row['name'],
|
||||
icon="icon-subscription"
|
||||
),
|
||||
status=200
|
||||
)
|
||||
|
||||
return gone(gettext("Could not find the specified subscription."))
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, subid):
|
||||
"""
|
||||
This function will show the properties of the selected subscription
|
||||
node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
"""
|
||||
status, res = self._fetch_properties(did, subid)
|
||||
|
||||
if not status:
|
||||
return res
|
||||
|
||||
return ajax_response(
|
||||
response=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
def _fetch_properties(self, did, subid):
|
||||
"""
|
||||
This function fetches the properties of the subscription.
|
||||
:param did:
|
||||
:param subid:
|
||||
:return:
|
||||
"""
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._PROPERTIES_SQL]),
|
||||
subid=subid, did=did,
|
||||
)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return False, internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return False, gone(self._NOT_FOUND_PUB_INFORMATION)
|
||||
if 'cur_pub' in res['rows'][0]:
|
||||
res['rows'][0]['cur_pub'] = ", ".join(str(elem) for elem in
|
||||
res['rows'][0]['cur_pub'])
|
||||
res['rows'][0]['pub'] = ", ".join(str(elem) for elem in
|
||||
res['rows'][0]['pub'])
|
||||
|
||||
return True, res['rows'][0]
|
||||
|
||||
@check_precondition
|
||||
def dependents(self, gid, sid, did, subid):
|
||||
"""
|
||||
This function gets the dependents and returns an ajax response
|
||||
for the subscription node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
"""
|
||||
dependents_result = self.get_dependents(self.conn, subid)
|
||||
return ajax_response(
|
||||
response=dependents_result,
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def statistics(self, gid, sid, did, subid):
|
||||
"""
|
||||
This function gets the statistics and returns an ajax response
|
||||
for the subscription node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'stats.sql']),
|
||||
subid=subid, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
return make_json_response(
|
||||
data=res,
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def dependencies(self, gid, sid, did, subid):
|
||||
"""
|
||||
This function gets the dependencies and returns an ajax response
|
||||
for the subscription node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
"""
|
||||
dependencies_result = self.get_dependencies(self.conn, subid)
|
||||
return ajax_response(
|
||||
response=dependencies_result,
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def update(self, gid, sid, did, subid):
|
||||
"""
|
||||
This function will update the data for the selected subscription node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
"""
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
|
||||
try:
|
||||
if 'pub' in data:
|
||||
data['pub'] = json.loads(
|
||||
data['pub'], encoding='utf-8'
|
||||
)
|
||||
sql, name = self.get_sql(data, subid)
|
||||
# Most probably this is due to an error
|
||||
if not isinstance(sql, str):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
subid,
|
||||
did,
|
||||
name,
|
||||
icon="icon-%s" % self.node_type
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def create(self, gid, sid, did):
|
||||
"""
|
||||
This function will create the subscription object
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
"""
|
||||
required_args = [
|
||||
'name'
|
||||
]
|
||||
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext(
|
||||
"Could not find the required parameter ({})."
|
||||
).format(arg)
|
||||
)
|
||||
|
||||
try:
|
||||
data['pub'] = json.loads(
|
||||
data['pub'], encoding='utf-8'
|
||||
)
|
||||
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._CREATE_SQL]),
|
||||
data=data, dummy=False, conn=self.conn)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'get_position.sql']),
|
||||
conn=self.conn, subname=data['name']
|
||||
)
|
||||
|
||||
status, r_set = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=r_set)
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
r_set['rows'][0]['oid'],
|
||||
did,
|
||||
r_set['rows'][0]['name'],
|
||||
icon='icon-subscription'
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def delete(self, gid, sid, did, subid=None):
|
||||
"""
|
||||
This function will drop the subscription object
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
only_sql:
|
||||
"""
|
||||
if subid is None:
|
||||
data = request.form if request.form else json.loads(
|
||||
request.data, encoding='utf-8'
|
||||
)
|
||||
else:
|
||||
data = {'ids': [subid]}
|
||||
|
||||
cascade = self._check_cascade_operation()
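# delete.sql is used twice per subscription: rendered with 'subid' it
# resolves the subscription name from pg_subscription, and rendered with
# 'subname' it produces the DROP SUBSCRIPTION [... CASCADE] statement.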
|
||||
|
||||
try:
|
||||
for subid in data['ids']:
|
||||
# Get name for subscription from subid
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._DELETE_SQL]),
|
||||
subid=subid, conn=self.conn
|
||||
)
|
||||
status, subname = self.conn.execute_scalar(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=subname)
|
||||
|
||||
# drop subscription
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._DELETE_SQL]),
|
||||
subname=subname, cascade=cascade, conn=self.conn
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_scalar(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return make_json_response(
|
||||
success=1,
|
||||
info=gettext("Subscription dropped")
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
@check_precondition
|
||||
def msql(self, gid, sid, did, subid=None):
|
||||
"""
|
||||
This function is used to return modified SQL for the selected
|
||||
subscription node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
"""
|
||||
data = {}
|
||||
for k, v in request.args.items():
|
||||
try:
|
||||
# Comments should be taken as-is: if the user enters JSON-like text as a
|
||||
# comment it must not be parsed by json.loads.
|
||||
if k in ('description',):
|
||||
data[k] = v
|
||||
else:
|
||||
data[k] = json.loads(v, encoding='utf-8')
|
||||
except ValueError:
|
||||
data[k] = v
|
||||
try:
|
||||
sql, name = self.get_sql(data, subid, 'msql')
|
||||
# If get_sql() did not return a string it is an error response; return it as-is.
|
||||
if not isinstance(sql, str):
|
||||
return sql
|
||||
if sql == '':
|
||||
sql = "--modified SQL"
|
||||
|
||||
return make_json_response(
|
||||
data=sql,
|
||||
status=200
|
||||
)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_details(self, data, old_data):
|
||||
"""
|
||||
This function returns the required data to create subscription
|
||||
:param data:
|
||||
:return:
|
||||
|
||||
"""
|
||||
required_args = ['name']
|
||||
|
||||
required_connection_args = ['host', 'port', 'username', 'db',
|
||||
'connect_timeout', 'passfile']
|
||||
for arg in required_args:
|
||||
if arg not in data and arg in old_data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
for arg in required_connection_args:
|
||||
if arg not in data and arg in old_data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
return data
|
||||
|
||||
def get_sql(self, data, subid=None, operation=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
|
||||
Args:
|
||||
data: Contains the data of the selected subscription node.
|
||||
subid: Subscription ID
|
||||
"""
|
||||
|
||||
required_args = ['name']
|
||||
|
||||
required_connection_args = ['host', 'port', 'username', 'db',
|
||||
'connect_timeout', 'passfile']
|
||||
if operation == 'msql':
|
||||
dummy = True
|
||||
else:
|
||||
dummy = False
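# 'dummy' is passed through to the SQL templates; when True (modified-SQL
# preview) the CONNECTION string is rendered with password=xxxxxx instead
# of the real password.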
|
||||
|
||||
if subid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._PROPERTIES_SQL]),
|
||||
subid=subid,
|
||||
)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(self._NOT_FOUND_PUB_INFORMATION)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
for arg in required_connection_args:
|
||||
if arg in data:
|
||||
old_data[arg] = data[arg]
|
||||
|
||||
if 'slot_name' in data and data['slot_name'] == '':
|
||||
data['slot_name'] = 'None'
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._UPDATE_SQL]),
|
||||
data=data, o_data=old_data, conn=self.conn, dummy=dummy,
|
||||
)
|
||||
return sql.strip('\n'), data['name'] if 'name' in data \
|
||||
else old_data['name']
|
||||
else:
|
||||
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._CREATE_SQL]),
|
||||
data=data, dummy=dummy, conn=self.conn)
|
||||
return sql.strip('\n'), data['name']
|
||||
|
||||
def get_connection(self, connection_details):
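# Note: this opens a direct psycopg2 connection to the publisher described
# by the given connection details (independent of pgAdmin's managed driver
# connections), reads pg_publication, and closes the connection before
# returning the list of publication names.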
|
||||
|
||||
passfile = connection_details['passfile'] if \
|
||||
'passfile' in connection_details and \
|
||||
connection_details['passfile'] != '' else None
|
||||
|
||||
conn = psycopg2.connect(
|
||||
host=connection_details['host'],
|
||||
database=connection_details['db'],
|
||||
user=connection_details['username'],
|
||||
password=connection_details['password'] if
|
||||
connection_details['password'] else None,
|
||||
port=connection_details['port'] if
|
||||
connection_details['port'] else None,
|
||||
passfile=get_complete_file_path(passfile),
|
||||
connect_timeout=connection_details['connect_timeout'] if
|
||||
'connect_timeout' in connection_details and
|
||||
connection_details['connect_timeout'] else 0
|
||||
)
|
||||
# create a cursor
|
||||
cur = conn.cursor()
|
||||
cur.execute('SELECT pubname from pg_publication')
|
||||
|
||||
publications = cur.fetchall()
|
||||
# Close the connection
|
||||
conn.close()
|
||||
|
||||
return publications
|
||||
|
||||
@check_precondition
|
||||
def get_publications(self, gid, sid, did, *args, **kwargs):
|
||||
"""
|
||||
This function returns the publication list
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
"""
|
||||
|
||||
url_params = None
|
||||
if request.args:
|
||||
url_params = {k: v for k, v in request.args.items()}
|
||||
|
||||
required_connection_args = ['host', 'port', 'username', 'db',
|
||||
'connect_timeout', 'passfile']
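# When an existing subscription is being edited the dialog may not resend
# every connection field, so fill in any missing values from the stored
# subscription properties before connecting to the publisher.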
|
||||
|
||||
if 'oid' in url_params:
|
||||
status, params = self._fetch_properties(did, url_params['oid'])
|
||||
for arg in required_connection_args:
|
||||
if arg not in url_params and arg in params:
|
||||
url_params[arg] = params[arg]
|
||||
|
||||
res = self.get_connection(url_params)
|
||||
|
||||
result = []
|
||||
for pub in res:
|
||||
result.append({
|
||||
"value": pub[0],
|
||||
"label": pub[0]
|
||||
})
|
||||
|
||||
return make_json_response(
|
||||
data=result,
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, subid, json_resp=True):
|
||||
"""
|
||||
This function will generate sql to show in the sql pane for the
|
||||
selected subscription node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
json_resp:
|
||||
"""
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, self._PROPERTIES_SQL]),
|
||||
subid=subid
|
||||
)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(self._NOT_FOUND_PUB_INFORMATION)
|
||||
|
||||
# Making copy of output for future use
|
||||
old_data = dict(res['rows'][0])
|
||||
if old_data['slot_name'] is None and 'create_slot' not in old_data:
|
||||
old_data['create_slot'] = False
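# Rebuild the CREATE SUBSCRIPTION statement from the stored properties;
# dummy=True makes the template mask any password in the CONNECTION string.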
|
||||
|
||||
sql = render_template("/".join([self.template_path,
|
||||
self._CREATE_SQL]),
|
||||
data=old_data, conn=self.conn, dummy=True)
|
||||
sql += "\n\n"
|
||||
|
||||
sql_header = "-- Subscription: {}".format(old_data['name'])
|
||||
sql_header += "\n\n"
|
||||
|
||||
sql_header += "-- DROP SUBSCRIPTION {};".format(old_data['name'])
|
||||
|
||||
sql_header += render_template(
|
||||
"/".join([self.template_path, self._DELETE_SQL]),
|
||||
sname=old_data['name'], )
|
||||
|
||||
sql_header += "\n"
|
||||
|
||||
sql = sql_header + sql
|
||||
|
||||
if not json_resp:
|
||||
return sql
|
||||
|
||||
return ajax_response(response=sql)
|
||||
|
||||
@check_precondition
|
||||
def dependents(self, gid, sid, did, subid):
|
||||
"""
|
||||
This function gets the dependents and returns an ajax response
|
||||
for the subscription node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
"""
|
||||
dependents_result = self.get_dependents(self.conn, subid)
|
||||
return ajax_response(
|
||||
response=dependents_result,
|
||||
status=200
|
||||
)
|
||||
|
||||
@check_precondition
|
||||
def dependencies(self, gid, sid, did, subid):
|
||||
"""
|
||||
This function gets the dependencies and returns an ajax response
|
||||
for the subscription node.
|
||||
|
||||
Args:
|
||||
gid: Server Group ID
|
||||
sid: Server ID
|
||||
did: Database ID
|
||||
subid: Subscription ID
|
||||
"""
|
||||
dependencies_result = self.get_dependencies(self.conn, subid)
|
||||
return ajax_response(
|
||||
response=dependencies_result,
|
||||
status=200
|
||||
)
|
||||
|
||||
|
||||
SubscriptionView.register_node_view(blueprint)
|
@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 24.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#DDE1F0;stroke:#4B5FAD;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
.st1{fill:#4B5FAD;}
|
||||
.st2{fill:none;stroke:#4B5FAD;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
</style>
|
||||
<g>
|
||||
<path class="st0" d="M10,13c-2,2-5.1,2-7.1,0s-2-5.1,0-7.1C4.5,7.5,8.5,11.5,10,13z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path class="st0" d="M11.8,11.3c-2,2-5.1,2-7.1,0s-2-5.1,0-7.1C6.2,5.7,10.2,9.8,11.8,11.3z"/>
|
||||
</g>
|
||||
<path class="st1" d="M8.7,7.8l0.8-0.7c0.1,0.1,0.3,0.1,0.4,0.1c0.6,0,1-0.4,1-1s-0.4-1-1-1s-1,0.4-1,1c0,0.1,0,0.3,0.1,0.4L8.2,7.3
|
||||
L8.7,7.8z"/>
|
||||
<g>
|
||||
<path class="st2" d="M10.5,1.6c2.2,0,4,1.8,4,4"/>
|
||||
<path class="st2" d="M10.6,3.6c1.1,0,2,0.9,2,2"/>
|
||||
</g>
|
||||
</svg>
|
@ -0,0 +1,30 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 24.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FBEEDF;stroke:#E58E26;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
.st1{fill:#E58E26;}
|
||||
.st2{fill:none;stroke:#E58E26;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
</style>
|
||||
<g>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st0" d="M12.9,5.7c2,2,2,5.1,0,7.1s-5.1,2-7.1,0C7.3,11.3,11.4,7.2,12.9,5.7z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st0" d="M11.4,4.2c2,2,2,5.1,0,7.1s-5.1,2-7.1,0C5.8,9.8,9.9,5.7,11.4,4.2z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path class="st1" d="M7.8,7.3L7.1,6.6c0.1-0.1,0.1-0.3,0.1-0.4c0-0.6-0.4-1-1-1s-1,0.4-1,1s0.4,1,1,1c0.1,0,0.3,0,0.4-0.1
|
||||
l0.7,0.7"/>
|
||||
<g>
|
||||
<path class="st2" d="M5.6,1.7c0,2.2-1.8,4-4,4"/>
|
||||
<path class="st2" d="M3.7,1.8c0,1.1-0.9,2-2,2"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 23.0.6, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#DDE1F0;stroke:#4B5FAD;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
.st1{fill:#4B5FAD;}
|
||||
.st2{fill:none;stroke:#4B5FAD;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
</style>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st0" d="M11,12c-2,2-5.1,2-7.1,0s-2-5.1,0-7.1C5.4,6.4,9.5,10.5,11,12z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path class="st1" d="M7.9,8.6l0.7-0.7C8.7,8,8.9,8,9,8c0.6,0,1-0.4,1-1S9.6,6,9,6S8,6.4,8,7c0,0.1,0,0.3,0.1,0.4L7.4,8.1"/>
|
||||
<g>
|
||||
<path class="st2" d="M9.6,2.5c2.2,0,4,1.8,4,4"/>
|
||||
<path class="st2" d="M9.7,4.4c1.1,0,2,0.9,2,2"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 23.0.6, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 16 16" style="enable-background:new 0 0 16 16;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FBEEDF;stroke:#E58E26;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
.st1{fill:#E58E26;}
|
||||
.st2{fill:none;stroke:#E58E26;stroke-width:0.65;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:10;}
|
||||
</style>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st0" d="M12.2,5c2,2,2,5.1,0,7.1s-5.1,2-7.1,0C6.6,10.6,10.7,6.5,12.2,5z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path class="st1" d="M8.6,8.1L7.9,7.4C8,7.3,8,7.1,8,7c0-0.6-0.4-1-1-1S6,6.4,6,7s0.4,1,1,1c0.1,0,0.3,0,0.4-0.1l0.7,0.7"/>
|
||||
<g>
|
||||
<path class="st2" d="M6.4,2.5c0,2.2-1.8,4-4,4"/>
|
||||
<path class="st2" d="M4.5,2.6c0,1.1-0.9,2-2,2"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
@ -0,0 +1,430 @@
|
||||
/////////////////////////////////////////////////////////////
|
||||
//
|
||||
// pgAdmin 4 - PostgreSQL Tools
|
||||
//
|
||||
// Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
// This software is released under the PostgreSQL Licence
|
||||
//
|
||||
//////////////////////////////////////////////////////////////
|
||||
|
||||
define('pgadmin.node.subscription', [
|
||||
'sources/gettext', 'sources/url_for', 'jquery', 'underscore',
|
||||
'sources/pgadmin', 'pgadmin.browser', 'pgadmin.backform',
|
||||
'sources/browser/server_groups/servers/model_validation', 'pgadmin.alertifyjs', 'pgadmin.browser.collection',
|
||||
], function(gettext, url_for, $, _, pgAdmin, pgBrowser, Backform, modelValidation, Alertify) {
|
||||
|
||||
// Extend the browser's collection class for subscriptions collection
|
||||
if (!pgBrowser.Nodes['coll-subscription']) {
|
||||
pgBrowser.Nodes['coll-subscription'] =
|
||||
pgBrowser.Collection.extend({
|
||||
node: 'subscription',
|
||||
label: gettext('Subscriptions'),
|
||||
type: 'coll-subscription',
|
||||
columns: ['name', 'subowner', 'pub', 'enabled'],
|
||||
hasStatistics: true,
|
||||
});
|
||||
}
|
||||
|
||||
// Extend the browser's node class for subscription node
|
||||
if (!pgBrowser.Nodes['subscription']) {
|
||||
pgBrowser.Nodes['subscription'] = pgBrowser.Node.extend({
|
||||
parent_type: 'database',
|
||||
type: 'subscription',
|
||||
sqlAlterHelp: 'sql-altersubscription.html',
|
||||
sqlCreateHelp: 'sql-createsubscription.html',
|
||||
dialogHelp: url_for('help.static', {'filename': 'subscription_dialog.html'}),
|
||||
label: gettext('Subscription'),
|
||||
hasSQL: true,
|
||||
canDrop: true,
|
||||
canDropCascade: true,
|
||||
hasDepends: true,
|
||||
hasStatistics: true,
|
||||
width: '501px',
|
||||
Init: function() {
|
||||
|
||||
// Avoid multiple registration of menus
|
||||
if (this.initialized)
|
||||
return;
|
||||
|
||||
this.initialized = true;
|
||||
|
||||
|
||||
// Add context menus for subscription
|
||||
pgBrowser.add_menus([{
|
||||
name: 'create_subscription_on_database', node: 'database', module: this,
|
||||
applies: ['object', 'context'], callback: 'show_obj_properties',
|
||||
category: 'create', priority: 4, label: gettext('Subscription...'),
|
||||
icon: 'wcTabIcon icon-subscription', data: {action: 'create'},
|
||||
enable: pgBrowser.Nodes['database'].canCreate,
|
||||
},{
|
||||
name: 'create_subscription_on_coll', node: 'coll-subscription', module: this,
|
||||
applies: ['object', 'context'], callback: 'show_obj_properties',
|
||||
category: 'create', priority: 4, label: gettext('Subscription...'),
|
||||
icon: 'wcTabIcon icon-subscription', data: {action: 'create'},
|
||||
enable: 'canCreate',
|
||||
},{
|
||||
name: 'create_subscription', node: 'subscription', module: this,
|
||||
applies: ['object', 'context'], callback: 'show_obj_properties',
|
||||
category: 'create', priority: 4, label: gettext('Subscription...'),
|
||||
icon: 'wcTabIcon icon-subscription', data: {action: 'create'},
|
||||
enable: 'canCreate',
|
||||
}]);
|
||||
},
|
||||
// Define the model for subscription node
|
||||
model: pgBrowser.Node.Model.extend({
|
||||
idAttribute: 'oid',
|
||||
defaults: {
|
||||
name: undefined,
|
||||
subowner: undefined,
|
||||
pubtable: undefined,
|
||||
pub:[],
|
||||
enabled:true,
|
||||
create_slot: true,
|
||||
copy_data:true,
|
||||
connect:true,
|
||||
copy_data_after_refresh:false,
|
||||
sync:'off',
|
||||
refresh_pub: undefined,
|
||||
},
|
||||
|
||||
// Set the default owner for a newly created subscription
|
||||
initialize: function(attrs, args) {
|
||||
var isNew = (_.size(attrs) === 0);
|
||||
if (isNew) {
|
||||
var userInfo = pgBrowser.serverInfo[args.node_info.server._id].user;
|
||||
|
||||
this.set({'subowner': userInfo.name}, {silent: true});
|
||||
}
|
||||
pgBrowser.Node.Model.prototype.initialize.apply(this, arguments);
|
||||
},
|
||||
|
||||
// Define the schema for the subscription node
|
||||
schema: [{
|
||||
id: 'name', label: gettext('Name'), type: 'text',
|
||||
mode: ['properties', 'create', 'edit'],
|
||||
visible: function() {
|
||||
if(!_.isUndefined(this.node_info) && !_.isUndefined(this.node_info.server)
|
||||
&& !_.isUndefined(this.node_info.server.version) &&
|
||||
this.node_info.server.version >= 100000) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
},{
|
||||
id: 'oid', label: gettext('OID'), cell: 'string', mode: ['properties'],
|
||||
type: 'text',
|
||||
},
|
||||
{
|
||||
id: 'subowner', label: gettext('Owner'), type: 'text',
|
||||
control: Backform.NodeListByNameControl, node: 'role',
|
||||
mode: ['edit', 'properties', 'create'], select2: { allowClear: false},
|
||||
disabled: function(m){
|
||||
if(m.isNew())
|
||||
return true;
|
||||
return false;
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'host', label: gettext('Host name/address'), type: 'text', group: gettext('Connection'),
|
||||
mode: ['properties', 'edit', 'create'],
|
||||
control: Backform.InputControl.extend({
|
||||
onChange: function() {
|
||||
Backform.InputControl.prototype.onChange.apply(this, arguments);
|
||||
if (!this.model || !this.model.changed) {
|
||||
this.model.inform_text = undefined;
|
||||
return;
|
||||
}
|
||||
},
|
||||
}),
|
||||
},{
|
||||
id: 'port', label: gettext('Port'), type: 'int', group: gettext('Connection'),
|
||||
mode: ['properties', 'edit', 'create'], min: 1, max: 65535,
|
||||
control: Backform.InputControl.extend({
|
||||
onChange: function() {
|
||||
Backform.InputControl.prototype.onChange.apply(this, arguments);
|
||||
if (!this.model || !this.model.changed) {
|
||||
this.model.inform_text = undefined;
|
||||
return;
|
||||
}
|
||||
},
|
||||
}),
|
||||
},{
|
||||
id: 'username', label: gettext('Username'), type: 'text', group: gettext('Connection'),
|
||||
mode: ['properties', 'edit', 'create'],
|
||||
control: Backform.InputControl.extend({
|
||||
onChange: function() {
|
||||
Backform.InputControl.prototype.onChange.apply(this, arguments);
|
||||
if (!this.model || !this.model.changed) {
|
||||
this.model.inform_text = undefined;
|
||||
return;
|
||||
}
|
||||
},
|
||||
}),
|
||||
},{
|
||||
id: 'password', label: gettext('Password'), type: 'password', maxlength: null,
|
||||
group: gettext('Connection'), control: 'input', mode: ['create', 'edit'], deps: ['connect_now'],
|
||||
},{
|
||||
id: 'db', label: gettext('Database'), type: 'text', group: gettext('Connection'),
|
||||
mode: ['properties', 'edit', 'create'],
|
||||
},
|
||||
{
|
||||
id: 'connect_timeout', label: gettext('Connection timeout'), type: 'text',
|
||||
mode: ['properties', 'edit', 'create'],
|
||||
group: gettext('Connection'),
|
||||
},
|
||||
{
|
||||
id: 'passfile', label: gettext('Passfile'), type: 'text', group: gettext('Connection'),
|
||||
mode: ['properties', 'edit', 'create'],
|
||||
},
|
||||
{
|
||||
id: 'pub', label: gettext('Publication'), type: 'text', group: gettext('Connection'),
|
||||
mode: ['properties'],
|
||||
},
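// Note: 'pub' is defined twice: the plain text field above is shown in
// properties mode, while the array/select2 field further below (with the
// refresh button) is used in create and edit mode.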
|
||||
{
|
||||
id: 'cur_pub', label: gettext('Current publication'), type: 'text', group: gettext('Connection'),
|
||||
mode: ['edit'], disabled:true,
|
||||
},
|
||||
{
|
||||
id: 'pub', label: gettext('Publication'), type: 'array', select2: { allowClear: true, multiple: true, width: '92%'},
|
||||
group: gettext('Connection'), mode: ['create', 'edit'], controlsClassName: 'pgadmin-controls pg-el-sm-11 pg-el-12',
|
||||
deps: ['all_table', 'host', 'port', 'username', 'db', 'password'], disabled: 'isAllConnectionDataEnter',
|
||||
helpMessage: gettext('Click the refresh button to get the publications.'),
|
||||
control: Backform.Select2Control.extend({
|
||||
defaults: _.extend(Backform.Select2Control.prototype.defaults, {
|
||||
select2: {
|
||||
allowClear: true,
|
||||
selectOnBlur: true,
|
||||
tags: true,
|
||||
placeholder: gettext('Select an item...'),
|
||||
width: 'style',
|
||||
},
|
||||
}),
|
||||
template: _.template([
|
||||
'<label class="<%=Backform.controlLabelClassName%>" for="<%=cId%>"><%=label%></label>',
|
||||
'<div class="<%=Backform.controlsClassName%>">',
|
||||
'<div class="input-group">',
|
||||
' <select title="<%=name%>" id="<%=cId%>" class="<%=Backform.controlClassName%> <%=extraClasses.join(\' \')%>"',
|
||||
' name="<%=name%>" value="<%-value%>" <%=disabled ? "disabled" : ""%> <%=readonly ? "disabled" : ""%>',
|
||||
' <%=required ? "required" : ""%><%= select2.multiple ? " multiple>" : ">" %>',
|
||||
' <%=select2.first_empty ? " <option></option>" : ""%>',
|
||||
' <% for (var i=0; i < options.length; i++) {%>',
|
||||
' <% var option = options[i]; %>',
|
||||
' <option ',
|
||||
' <% if (option.image) { %> data-image=<%=option.image%> <%}%>',
|
||||
' value=<%- formatter.fromRaw(option.value) %>',
|
||||
' <% if (option.selected) {%>selected="selected"<%} else {%>',
|
||||
' <% if (!select2.multiple && option.value === rawValue) {%>selected="selected"<%}%>',
|
||||
' <% if (select2.multiple && rawValue && rawValue.indexOf(option.value) != -1){%>selected="selected" data-index="rawValue.indexOf(option.value)"<%}%>',
|
||||
' <%}%>',
|
||||
' <%= disabled ? "disabled" : ""%> <%=readonly ? "disabled" : ""%>><%-option.label%></option>',
|
||||
' <%}%>',
|
||||
' </select>',
|
||||
'<div class="input-group-append">',
|
||||
'<button class="btn btn-primary-icon fa fa-sync get_publication" <%=disabled ? "disabled" : ""%> <%=readonly ? "disabled" : ""%> aria-hidden="true" aria-label="' + gettext('Get Publication') + '" title="' + gettext('Get Publication') + '"></button>',
|
||||
'</div>',
|
||||
'</div>',
|
||||
'<% if (helpMessage && helpMessage.length) { %>',
|
||||
'<span class="<%=Backform.helpMessageClassName%>"><%=helpMessage%></span>',
|
||||
'<% } %>',
|
||||
'</div>',
|
||||
].join('\n')),
|
||||
|
||||
events: _.extend({}, Backform.Select2Control.prototype.events(), {
|
||||
'click .get_publication': 'getPublication',
|
||||
}),
|
||||
|
||||
render: function(){
|
||||
return Backform.Select2Control.prototype.render.apply(this, arguments);
|
||||
},
|
||||
|
||||
getPublication: function() {
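// Handler for the refresh button next to the Publication control: sends a
// GET request with the current connection fields to the 'get_publications'
// endpoint and reloads the select2 options with the publications found on
// the publisher.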
|
||||
var self = this;
|
||||
var publication_url = pgBrowser.Nodes['database'].generate_url.apply(
|
||||
pgBrowser.Nodes['subscription'], [
|
||||
null, 'get_publications', this.field.get('node_data'), null,
|
||||
this.field.get('node_info'), pgBrowser.Nodes['database'].url_jump_after_node,
|
||||
]);
|
||||
var result = '';
|
||||
|
||||
$.ajax({
|
||||
url: publication_url,
|
||||
type: 'GET',
|
||||
data: self.model.toJSON(true, 'GET'),
|
||||
dataType: 'json',
|
||||
contentType: 'application/json',
|
||||
})
|
||||
.done(function(res) {
|
||||
result = res.data;
|
||||
self.field.set('options', result);
|
||||
Backform.Select2Control.prototype.render.apply(self, arguments);
|
||||
|
||||
var transform = self.field.get('transform') || self.defaults.transform;
|
||||
if (transform && _.isFunction(transform)) {
|
||||
self.field.set('options', transform.bind(self, result));
|
||||
} else {
|
||||
self.field.set('options', result);
|
||||
}
|
||||
Alertify.info(
|
||||
gettext('Publication fetched successfully.')
|
||||
);
|
||||
|
||||
|
||||
})
|
||||
.fail(function(res) {
|
||||
Alertify.alert(
|
||||
gettext('Check connection?'),
|
||||
gettext(res.responseJSON.errormsg)
|
||||
);
|
||||
});
|
||||
},
|
||||
}),
|
||||
},
|
||||
{
|
||||
id: 'copy_data_after_refresh', label: gettext('Copy data?'),
|
||||
type: 'switch', mode: ['edit'],
|
||||
group: gettext('With'),
|
||||
readonly: 'isRefresh', deps :['refresh_pub'],
|
||||
helpMessage: gettext('Specifies whether the existing data in the publications that are being subscribed to should be copied once the replication starts.'),
|
||||
},
|
||||
{
|
||||
id: 'copy_data', label: gettext('Copy data?'),
|
||||
type: 'switch', mode: ['create'],
|
||||
group: gettext('With'),
|
||||
readonly: 'isConnect', deps :['connect'],
|
||||
helpMessage: gettext('Specifies whether the existing data in the publications that are being subscribed to should be copied once the replication starts.'),
|
||||
},
|
||||
{
|
||||
id: 'create_slot', label: gettext('Create slot?'),
|
||||
type: 'switch', mode: ['create'],
|
||||
group: gettext('With'),
|
||||
disabled: 'isDisable',
|
||||
readonly: 'isConnect', deps :['connect'],
|
||||
helpMessage: gettext('Specifies whether the command should create the replication slot on the publisher.'),
|
||||
|
||||
},
|
||||
{
|
||||
id: 'enabled', label: gettext('Enabled?'),
|
||||
type: 'switch', mode: ['create','edit', 'properties'],
|
||||
group: gettext('With'),
|
||||
readonly: 'isConnect', deps :['connect'],
|
||||
helpMessage: gettext('Specifies whether the subscription should be actively replicating, or whether it should just be set up but not started yet.'),
|
||||
},
|
||||
{
|
||||
id: 'refresh_pub', label: gettext('Refresh publication?'),
|
||||
type: 'switch', mode: ['edit'],
|
||||
group: gettext('With'),
|
||||
helpMessage: gettext('Fetch missing table information from publisher. '),
|
||||
deps:['enabled'], disabled: function(m){
|
||||
if (m.get('enabled'))
|
||||
return false;
|
||||
setTimeout( function() {
|
||||
m.set('refresh_pub', false);
|
||||
}, 10);
|
||||
return true;
|
||||
},
|
||||
},{
|
||||
id: 'connect', label: gettext('Connect?'),
|
||||
type: 'switch', mode: ['create'],
|
||||
group: gettext('With'),
|
||||
disabled: 'isDisable', deps:['enabled', 'create_slot', 'copy_data'],
|
||||
helpMessage: gettext('Specifies whether the CREATE SUBSCRIPTION should connect to the publisher at all. Setting this to false will change default values of enabled, create_slot and copy_data to false.'),
|
||||
},
|
||||
{
|
||||
id: 'slot_name', label: gettext('Slot name'),
|
||||
type: 'text', mode: ['create','edit', 'properties'],
|
||||
group: gettext('With'),
|
||||
helpMessage: gettext('Name of the replication slot to use. The default behavior is to use the name of the subscription for the slot name.'),
|
||||
},
|
||||
{
|
||||
id: 'sync', label: gettext('Synchronous commit'), control: 'select2', deps:['event'],
|
||||
group: gettext('With'), type: 'text',
|
||||
helpMessage: gettext('The value of this parameter overrides the synchronous_commit setting. The default value is off.'),
|
||||
select2: {
|
||||
width: '100%',
|
||||
allowClear: false,
|
||||
},
|
||||
options:[
|
||||
{label: 'local', value: 'local'},
|
||||
{label: 'remote_write', value: 'remote_write'},
|
||||
{label: 'remote_apply', value: 'remote_apply'},
|
||||
{label: 'on', value: 'on'},
|
||||
{label: 'off', value: 'off'},
|
||||
],
|
||||
},
|
||||
],
|
||||
isDisable:function(m){
|
||||
if (m.isNew())
|
||||
return false;
|
||||
return true;
|
||||
},
|
||||
isAllConnectionDataEnter: function(m){
|
||||
let host = m.get('host'),
|
||||
db = m.get('db'),
|
||||
port = m.get('port'),
|
||||
username = m.get('username'),
|
||||
password = m.get('password');
|
||||
if ((!_.isUndefined(host) && host) && (!_.isUndefined(db) && db) && (!_.isUndefined(port) && port) && (!_.isUndefined(username) && username) && (!_.isUndefined(password) && password))
|
||||
return false;
|
||||
return true;
|
||||
},
|
||||
isConnect: function(m){
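// When 'Connect?' is switched off, the dependent options (copy data,
// create slot, enabled) are forced to false shortly afterwards and their
// switches become read-only, matching the behaviour of
// CREATE SUBSCRIPTION ... WITH (connect = false).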
|
||||
if(!m.get('connect')){
|
||||
setTimeout( function() {
|
||||
m.set('copy_data', false);
|
||||
m.set('create_slot', false);
|
||||
m.set('enabled', false);
|
||||
}, 10);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
isRefresh: function(m){
|
||||
if (!m.get('refresh_pub') || _.isUndefined(m.get('refresh_pub'))){
|
||||
setTimeout( function() {
|
||||
m.set('copy_data_after_refresh', false);
|
||||
}, 10);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
sessChanged: function() {
|
||||
if (_.isEqual(_.omit(this.attributes, ['refresh_pub']), _.omit(this.origSessAttrs, ['refresh_pub'])) && !this.isNew())
|
||||
return false;
|
||||
return pgBrowser.DataModel.prototype.sessChanged.apply(this);
|
||||
},
|
||||
/* validate function is used to validate the input given by
|
||||
* the user. In case of error, message will be displayed on
|
||||
* the GUI for the respective control.
|
||||
*/
|
||||
validate: function() {
|
||||
var msg;
|
||||
this.errorModel.clear();
|
||||
var name = this.get('name');
|
||||
|
||||
if (_.isUndefined(name) || _.isNull(name) ||
|
||||
String(name).replace(/^\s+|\s+$/g, '') == '') {
|
||||
msg = gettext('Name cannot be empty.');
|
||||
this.errorModel.set('name', msg);
|
||||
return msg;
|
||||
}
|
||||
|
||||
const validateModel = new modelValidation.ModelValidation(this);
|
||||
return validateModel.validate();
|
||||
},
|
||||
canCreate: function(itemData, item) {
|
||||
var treeData = this.getTreeNodeHierarchy(item),
|
||||
server = treeData['server'];
|
||||
|
||||
// If the server version is less than 10 then do not allow the 'create' menu
|
||||
if (server && server.version < 100000)
|
||||
return false;
|
||||
|
||||
// by default we want to allow create menu
|
||||
return true;
|
||||
},
|
||||
|
||||
}),
|
||||
});
|
||||
}
|
||||
return pgBrowser.Nodes['coll-subscription'];
|
||||
});
|
@ -0,0 +1,24 @@
|
||||
{% if data.copy_data is defined or data.create_slot is defined or data.slot_name is defined or data.sync is defined %}
|
||||
{% set add_semicolon_after_enabled = 'enabled' %}
|
||||
{% endif %}
|
||||
{% if data.create_slot is defined or data.slot_name is defined %}
|
||||
{% set add_semicolon_after_copy_data = 'copy_data' %}
|
||||
{% endif %}
|
||||
{% if data.slot_name is defined or data.sync is defined %}
|
||||
{% set add_semicolon_after_create_slot = 'create_slot' %}
|
||||
{% endif %}
|
||||
{% if data.sync is defined %}
|
||||
{% set add_semicolon_after_slot_name = 'slot_name' %}
|
||||
{% endif %}
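{# Despite their names, the add_semicolon_after_* flags control whether a
   trailing comma is emitted after the corresponding option in the WITH (...)
   list, depending on which later options are present. #}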
|
||||
|
||||
CREATE SUBSCRIPTION {{ conn|qtIdent(data.name) }}
|
||||
{% if data.host or data.port or data.username or data.password or data.db %}
|
||||
CONNECTION '{% if data.host %}host={{data.host}}{% endif %}{% if data.port %} port={{ data.port }}{% endif %}{% if data.username %} user={{ data.username }}{% endif %}{% if data.db %} dbname={{ data.db }}{% endif %}{% if data.connect_timeout %} connect_timeout={{ data.connect_timeout }}{% endif %}{% if data.passfile %} passfile={{ data.passfile }}{% endif %}{% if data.password %} {% if dummy %}password=xxxxxx{% else %}password={{ data.password}}{% endif %}{% endif %}'
|
||||
{% endif %}
|
||||
{% if data.pub %}
|
||||
PUBLICATION {% for pub in data.pub %}{% if loop.index != 1 %},{% endif %}{{ conn|qtIdent(pub) }}{% endfor %}
|
||||
{% endif %}
|
||||
|
||||
WITH ({% if data.connect is defined %}connect = {{ data.connect|lower}}, {% endif %}enabled = {{ data.enabled|lower}}, {% if data.copy_data %}copy_data = {{ data.copy_data|lower}}{% if add_semicolon_after_copy_data == 'copy_data' %}, {% endif %}{% endif %}
|
||||
{% if data.create_slot is defined %}create_slot = {{ data.create_slot|lower }}{% if add_semicolon_after_create_slot == 'create_slot' %}, {% endif %}{% endif %}
|
||||
{% if data.slot_name is defined and data.slot_name != ''%}slot_name = {{ data.slot_name }}{% if add_semicolon_after_slot_name == 'slot_name' %}, {% endif %}{% endif %}{% if data.sync %}synchronous_commit = '{{ data.sync }}'{% endif %});
|
@ -0,0 +1,8 @@
|
||||
{# ============= Get the subscription name using oid ============= #}
|
||||
{% if subid %}
|
||||
SELECT subname FROM pg_subscription WHERE oid = {{subid}}::oid;
|
||||
{% endif %}
|
||||
{# ============= Drop the subscription ============= #}
|
||||
{% if subname %}
|
||||
DROP SUBSCRIPTION {{ conn|qtIdent(subname) }}{% if cascade %} CASCADE{% endif%};
|
||||
{% endif %}
|
@ -0,0 +1 @@
|
||||
SELECT oid, subname AS name FROM pg_subscription WHERE subname = '{{ subname }}';
|
@ -0,0 +1,7 @@
|
||||
SELECT oid, sub.subname AS name FROM pg_subscription sub
|
||||
WHERE
|
||||
{% if subid %}
|
||||
sub.oid = {{ subid }};
|
||||
{% else %}
|
||||
sub.subdbid = {{ did }};
|
||||
{% endif %}
|
@ -0,0 +1,21 @@
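-- The individual connection fields are not stored as separate columns, so
-- host, port, user, dbname, connect_timeout and passfile are recovered by
-- splitting the pg_subscription.subconninfo string.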
|
||||
SELECT sub.oid AS oid,
|
||||
subname AS name,
|
||||
subpublications AS pub,
|
||||
sub.subsynccommit AS sync,
|
||||
subpublications AS cur_pub,
|
||||
pga.rolname AS subowner,
|
||||
subslotname AS slot_name,
|
||||
subenabled AS enabled,
|
||||
SPLIT_PART(SPLIT_PART(subconninfo,' port',1), '=',2) AS host,
|
||||
SPLIT_PART(SPLIT_PART(subconninfo,'port=',2), ' ',1) AS port,
|
||||
SPLIT_PART(SPLIT_PART(subconninfo,'user=',2), ' ',1) AS username,
|
||||
SPLIT_PART(SPLIT_PART(subconninfo,'dbname=',2), ' ',1) AS db,
|
||||
SPLIT_PART(SPLIT_PART(subconninfo,'connect_timeout=',2), ' ',1) AS connect_timeout,
|
||||
SPLIT_PART(SPLIT_PART(subconninfo,'passfile=',2), ' ',1) AS passfile
|
||||
FROM pg_subscription sub JOIN pg_authid pga ON sub.subowner= pga.oid
|
||||
WHERE
|
||||
{% if subid %}
|
||||
sub.oid = {{ subid }};
|
||||
{% else %}
|
||||
sub.subdbid = {{ did }};
|
||||
{% endif %}
|
@ -0,0 +1,8 @@
|
||||
SELECT
|
||||
subname AS {{ conn|qtIdent(_('Subscription name')) }},
|
||||
latest_end_time AS {{ conn|qtIdent(_('Latest end time')) }},
|
||||
latest_end_lsn AS {{ conn|qtIdent(_('Latest end lsn')) }},
|
||||
last_msg_receipt_time AS {{ conn|qtIdent(_('Last message receipt')) }},
|
||||
last_msg_send_time AS {{ conn|qtIdent(_('Last message send time'))}}
|
||||
FROM pg_stat_subscription WHERE subid = {{ subid }};
|
||||
|
@ -0,0 +1,65 @@
|
||||
{% if data.sync is defined %}
|
||||
{% set add_semicolon_after_slot_name = 'slot_name' %}
|
||||
{% endif %}
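{# Each kind of change (owner, enable/disable, slot name and synchronous
   commit, refresh, publication list, connection string, rename) is emitted
   below as its own ALTER SUBSCRIPTION statement. #}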
|
||||
{#####################################################}
|
||||
{## Change owner of subscription ##}
|
||||
{#####################################################}
|
||||
{% if data.subowner and data.subowner != o_data.subowner %}
|
||||
ALTER SUBSCRIPTION {{ conn|qtIdent(o_data.name) }}
|
||||
OWNER TO {{ data.subowner }};
|
||||
|
||||
{% endif %}
|
||||
{### Disable subscription ###}
|
||||
{% if data.enabled is defined and data.enabled != o_data.enabled %}
|
||||
{% if not data.enabled %}
|
||||
ALTER SUBSCRIPTION {{ conn|qtIdent(o_data.name) }} DISABLE;
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
{### Alter slot name of subscription ###}
|
||||
{% if data.slot_name is defined or data.sync %}
|
||||
ALTER SUBSCRIPTION {{ conn|qtIdent(o_data.name) }}
|
||||
SET ({% if data.slot_name is defined and data.slot_name != o_data.slot_name %}slot_name = {{ data.slot_name }}{% if add_semicolon_after_slot_name == 'slot_name' %}, {% endif %}{% endif %}{% if data.sync %}synchronous_commit = '{{ data.sync }}'{% endif %});
|
||||
|
||||
{% endif %}
|
||||
{### Enable subscription ###}
|
||||
{% if data.enabled is defined and data.enabled != o_data.enabled %}
|
||||
{% if data.enabled %}
|
||||
ALTER SUBSCRIPTION {{ conn|qtIdent(o_data.name) }} ENABLE;
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
{### Refresh publication ###}
|
||||
{% if data.refresh_pub %}
|
||||
ALTER SUBSCRIPTION {{ conn|qtIdent(o_data.name) }}
|
||||
REFRESH PUBLICATION{% if not data.copy_data_after_refresh %} WITH (copy_data = false){% else %} WITH (copy_data = true){% endif %};
|
||||
|
||||
{% endif %}
|
||||
{### Alter publication of subscription ###}
|
||||
{% if data.pub%}
|
||||
{% if data.pub and not data.refresh_pub and not data.enabled %}
|
||||
ALTER SUBSCRIPTION {{ conn|qtIdent(o_data.name) }}
|
||||
SET PUBLICATION {% for pub in data.pub %}{% if loop.index != 1 %},{% endif %}{{ conn|qtIdent(pub) }}{% endfor %} WITH (refresh = false);
|
||||
{% else %}
|
||||
ALTER SUBSCRIPTION {{ conn|qtIdent(o_data.name) }}
|
||||
SET PUBLICATION {% for pub in data.pub %}{% if loop.index != 1 %},{% endif %}{{ conn|qtIdent(pub) }}{% endfor %};
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
{### Alter subscription connection info ###}
|
||||
{% if data.host or data.port or data.username or data.password or data.db or data.connect_timeout or data.passfile %}
|
||||
ALTER SUBSCRIPTION {{ conn|qtIdent(o_data.name) }}
|
||||
CONNECTION 'host={{ o_data.host}} port={{ o_data.port }} user={{ o_data.username }} dbname={{ o_data.db }}{% if data.connect_timeout %} connect_timeout={{ o_data.connect_timeout }}{% endif %}{% if data.passfile %} passfile={{ o_data.passfile }}{% endif %}{% if data.password %} {% if dummy %}password=xxxxxx{% else %} password={{ data.password}}{% endif %}{% endif %}';
|
||||
{% endif %}
|
||||
{### Alter subscription name ###}
|
||||
{% if data.name and data.name != o_data.name %}
|
||||
ALTER SUBSCRIPTION {{ conn|qtIdent(o_data.name) }}
|
||||
RENAME TO {{ conn|qtIdent(data.name) }};
|
||||
|
||||
{% endif %}
@ -0,0 +1,8 @@
|
||||
-- Subscription: test_alter_subscription
|
||||
|
||||
-- DROP SUBSCRIPTION test_alter_subscription;
|
||||
|
||||
CREATE SUBSCRIPTION test_alter_subscription
|
||||
CONNECTION 'host=localhost port=5432 user=postgres dbname=edb'
|
||||
PUBLICATION sample__1
|
||||
WITH (enabled = false, create_slot = false, slot_name = None, synchronous_commit = 'remote_apply');
|
@ -0,0 +1,8 @@
|
||||
-- Subscription: test_alter_subscription
|
||||
|
||||
-- DROP SUBSCRIPTION test_alter_subscription;
|
||||
|
||||
CREATE SUBSCRIPTION test_alter_subscription
|
||||
CONNECTION 'host=localhost port=5432 user=postgres dbname=postgres'
|
||||
PUBLICATION sample__1
|
||||
WITH (enabled = false, create_slot = false, slot_name = None, synchronous_commit = 'off');
|
@ -0,0 +1,8 @@
|
||||
-- Subscription: test_alter_subscription
|
||||
|
||||
-- DROP SUBSCRIPTION test_alter_subscription;
|
||||
|
||||
CREATE SUBSCRIPTION test_alter_subscription
|
||||
CONNECTION 'host=localhost port=5432 user=postgres dbname=postgres'
|
||||
PUBLICATION sample__1
|
||||
WITH (enabled = false, create_slot = false, slot_name = None, synchronous_commit = 'remote_apply');
|
@ -0,0 +1,8 @@
|
||||
-- Subscription: test_create_subscription
|
||||
|
||||
-- DROP SUBSCRIPTION test_create_subscription;
|
||||
|
||||
CREATE SUBSCRIPTION test_create_subscription
|
||||
CONNECTION 'host=localhost port=5432 user=postgres dbname=postgres'
|
||||
PUBLICATION sample__1
|
||||
WITH (enabled = false, create_slot = false, slot_name = None, synchronous_commit = 'off');
|
@ -0,0 +1,4 @@
|
||||
CREATE SUBSCRIPTION test_create_subscription
|
||||
CONNECTION 'host=localhost port=5432 user=postgres dbname=postgres password=xxxxxx'
|
||||
PUBLICATION sample__1
|
||||
WITH (connect = false, enabled = false, create_slot = false, slot_name = None, synchronous_commit = 'off');
|
@ -0,0 +1,70 @@
|
||||
{
|
||||
"scenarios": [
|
||||
{
|
||||
"type": "create",
|
||||
"name": "Create Subscription",
|
||||
"endpoint": "NODE-subscription.obj",
|
||||
"sql_endpoint": "NODE-subscription.sql_id",
|
||||
"msql_endpoint": "NODE-subscription.msql",
|
||||
"data": {
|
||||
"username": "postgres",
|
||||
"name": "test_create_subscription",
|
||||
"connect": false,
|
||||
"copy_data": false,
|
||||
"create_slot": false,
|
||||
"db": "postgres",
|
||||
"subowner": "postgres",
|
||||
"enabled": false,
|
||||
"host": "localhost",
|
||||
"slot_name": "None",
|
||||
"service": "",
|
||||
"port": 5432,
|
||||
"password": "edb",
|
||||
"sync": "off",
|
||||
"pub": "[\"sample__1\"]"
|
||||
},
|
||||
"expected_sql_file": "create_subscription.sql",
|
||||
"expected_msql_file": "create_subscription_msql.sql"
|
||||
},
|
||||
{
|
||||
"type": "alter",
|
||||
"name": "Alter Subscription",
|
||||
"endpoint": "NODE-subscription.obj_id",
|
||||
"sql_endpoint": "NODE-subscription.sql_id",
|
||||
"data": {
|
||||
"name": "test_alter_subscription"
|
||||
},
|
||||
"expected_sql_file": "alter_subscription.sql"
|
||||
},
|
||||
{
|
||||
"type": "alter",
|
||||
"name": "Alter sync of subscription",
|
||||
"endpoint": "NODE-subscription.obj_id",
|
||||
"sql_endpoint": "NODE-subscription.sql_id",
|
||||
"data": {
|
||||
"sync": "remote_apply"
|
||||
},
|
||||
"expected_sql_file": "alter_sync.sql"
|
||||
},
|
||||
|
||||
{
|
||||
"type": "alter",
|
||||
"name": "Alter maintainance DB in connection string of subscription",
|
||||
"endpoint": "NODE-subscription.obj_id",
|
||||
"sql_endpoint": "NODE-subscription.sql_id",
|
||||
"data": {
|
||||
"db": "edb"
|
||||
},
|
||||
"expected_sql_file": "alter_maintenance_db.sql"
|
||||
},
|
||||
|
||||
{
|
||||
"type": "delete",
|
||||
"name": "Drop subscription",
|
||||
"endpoint": "NODE-subscription.delete_id",
|
||||
"data": {
|
||||
"name": "test_alter_subscription"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
@ -0,0 +1,16 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
|
||||
|
||||
class RulesTestGenerator(BaseTestGenerator):
|
||||
|
||||
def runTest(self):
|
||||
return []
|
@ -0,0 +1,432 @@
|
||||
{
|
||||
"add_subscription": [
|
||||
{
|
||||
"name": "Create subscription with insert and update",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"test_data": {
|
||||
"username": "postgres",
|
||||
"name": "PLACEHOLDER",
|
||||
"connect": false,
|
||||
"copy_data": false,
|
||||
"create_slot": false,
|
||||
"db": "postgres",
|
||||
"subowner": "postgres",
|
||||
"enabled": false,
|
||||
"host": "localhost",
|
||||
"slot_name": "NONE",
|
||||
"service": "",
|
||||
"port": 5432,
|
||||
"password": "",
|
||||
"sync": "off",
|
||||
"pub": "PLACE_HOLDER"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create subscription for few tables",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"get_publication": true,
|
||||
"test_data": {
|
||||
"username": "postgres",
|
||||
"name": "PLACEHOLDER",
|
||||
"connect": false,
|
||||
"copy_data": false,
|
||||
"create_slot": false,
|
||||
"db": "postgres",
|
||||
"subowner": "postgres",
|
||||
"enabled": false,
|
||||
"host": "localhost",
|
||||
"slot_name": "NONE",
|
||||
"service": "",
|
||||
"port": 5432,
|
||||
"password": "",
|
||||
"sync": "off",
|
||||
"pub": "PLACE_HOLDER"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create a subscription without name",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": false,
|
||||
"without_name": true,
|
||||
"test_data": {
|
||||
"username": "postgres",
|
||||
"name": "PLACEHOLDER",
|
||||
"connect": false,
|
||||
"copy_data": false,
|
||||
"create_slot": false,
|
||||
"db": "postgres",
|
||||
"subowner": "postgres",
|
||||
"enabled": false,
|
||||
"host": "localhost",
|
||||
"slot_name": "NONE",
|
||||
"service": "",
|
||||
"port": 5432,
|
||||
"password": "",
|
||||
"sync": "off",
|
||||
"pub": "PLACE_HOLDER"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 410
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while adding a subscription",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": false,
|
||||
"error_creating_subscription": true,
|
||||
"test_data": {
|
||||
"username": "postgres",
|
||||
"name": "PLACEHOLDER",
|
||||
"connect": false,
|
||||
"copy_data": false,
|
||||
"create_slot": false,
|
||||
"db": "postgres",
|
||||
"subowner": "postgres",
|
||||
"enabled": false,
|
||||
"host": "localhost",
|
||||
"slot_name": "NONE",
|
||||
"service": "",
|
||||
"port": 5432,
|
||||
"password": "",
|
||||
"sync": "off",
|
||||
"pub": "PLACE_HOLDER"
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error ')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Exception while adding a subscription",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": false,
|
||||
"test_data": {
|
||||
"username": "postgres",
|
||||
"name": "PLACEHOLDER",
|
||||
"connect": false,
|
||||
"copy_data": false,
|
||||
"create_slot": false,
|
||||
"db": "postgres",
|
||||
"subowner": "postgres",
|
||||
"enabled": false,
|
||||
"host": "localhost",
|
||||
"slot_name": "NONE",
|
||||
"service": "",
|
||||
"port": 5432,
|
||||
"password": "",
|
||||
"sync": "off",
|
||||
"pub": "PLACE_HOLDER"
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(True, True)(False, 'Mocked Internal Server Error ')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
}
|
||||
],
|
||||
"get_subscription": [
|
||||
{
|
||||
"name": "Get a subscription URL",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a subscription properties",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get a subscription properties under database nodes",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"database_nodes": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a subscription properties under database nodes",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": false,
|
||||
"database_nodes": true,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get a subscription Node",
|
||||
"url": "/browser/subscription/nodes/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get a subscription Node dependants",
|
||||
"url": "/browser/subscription/dependent/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get a subscription Node dependency",
|
||||
"url": "/browser/subscription/dependency/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching the subscription under the database nodes using wrong database id",
|
||||
"url": "/browser/subscription/nodes/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get all the subscription under the database nodes",
|
||||
"url": "/browser/subscription/nodes/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"database_nodes": true,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get all the subscription under the database nodes using wrong database id",
|
||||
"url": "/browser/subscription/nodes/",
|
||||
"is_positive_test": true,
|
||||
"incorrect_database_id": true,
|
||||
"database_nodes": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching all the subscription under the database nodes using wrong database id",
|
||||
"url": "/browser/subscription/nodes/",
|
||||
"is_positive_test": false,
|
||||
"database_nodes": true,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
}
|
||||
],
|
||||
"delete_subscription": [
|
||||
{
|
||||
"name": "Delete a subscription URL",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while deleting the subscription",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
}
|
||||
],
|
||||
"update_subscription": [
|
||||
{
|
||||
"name": "update a subscription name",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"update_name": true,
|
||||
"test_data": {
|
||||
"name": "PLACE_HOLDER",
|
||||
"id": "PLACE_HOLDER"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "update a port in connection details of subscription",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"owner_subscription": true,
|
||||
"test_data": {
|
||||
"id": "PLACE_HOLDER",
|
||||
"port": "5444"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "update a username in connection details of subscription",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"owner_subscription": true,
|
||||
"test_data": {
|
||||
"id": "PLACE_HOLDER",
|
||||
"username": "sample_username"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a subscription to update",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"test_data": {
|
||||
"name": "PLACE_HOLDER",
|
||||
"id": "PLACE_HOLDER"
|
||||
},
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a subscription to update using wrong subscription id",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"wrong_subscription_id": true,
|
||||
"mocking_required": false,
|
||||
"test_data": {
|
||||
"id": "PLACE_HOLDER"
|
||||
},
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
}
|
||||
],
|
||||
"delete_multiple_subscription": [
|
||||
{
|
||||
"name": "Delete multiple subscription",
|
||||
"url": "/browser/subscription/obj/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
}
|
||||
],
|
||||
"sql_subscription": [
|
||||
{
|
||||
"name": "Fetch the subscription SQL",
|
||||
"url": "/browser/subscription/sql/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Error while fetching a subscription SQL",
|
||||
"url": "/browser/subscription/sql/",
|
||||
"is_positive_test": false,
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Fetching a subscription mSQL",
|
||||
"url": "/browser/subscription/msql/",
|
||||
"is_positive_test": true,
|
||||
"mocking_required": false,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False, 'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 200
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
@ -0,0 +1,97 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as subscription_utils
|
||||
|
||||
|
||||
class SubscriptionAddTestCase(BaseTestGenerator):
|
||||
"""This class will add new subscription"""
|
||||
scenarios = utils.generate_scenarios('add_subscription',
|
||||
subscription_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to add a subscription.")
|
||||
|
||||
if self.is_positive_test and hasattr(self, 'few_tables_11'):
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server,
|
||||
self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
|
||||
self.test_data['pubtable'] = subscription_utils.get_tables(self)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will subscription."""
|
||||
self.test_data['name'] = \
|
||||
"test_subscription_add_%s" % (str(uuid.uuid4())[1:8])
|
||||
|
||||
self.test_data['pub'] = """["sample__1"]"""
|
||||
|
||||
data = self.test_data
|
||||
if self.is_positive_test:
|
||||
response = self.create_subscription()
|
||||
else:
|
||||
if hasattr(self, 'without_name'):
|
||||
del data["name"]
|
||||
response = self.create_subscription()
|
||||
elif hasattr(self, 'error_creating_subscription'):
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.create_subscription()
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
side_effect=self.mock_data["return_value"]):
|
||||
response = self.create_subscription()
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def create_subscription(self):
|
||||
return self.tester.post(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' +
|
||||
str(self.server_id) + '/' + str(
|
||||
self.db_id) + '/',
|
||||
data=json.dumps(self.test_data),
|
||||
content_type='html/json')
|
||||
|
||||
def tearDown(self):
|
||||
if self.is_positive_test:
|
||||
subscription_utils.delete_subscription(self.server, self.db_name,
|
||||
self.test_data['name'])
|
||||
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
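For reference, the body the add test posts is assembled from the scenario's test_data plus the values set in runTest. A hypothetical minimal payload, showing only the fields the test itself fills in (all values below are illustrative, not taken from the patch):

# Hypothetical shape of the POST body for /browser/subscription/obj/<gid>/<sid>/<did>/
payload = {
    'name': 'test_subscription_add_1a2b3c4',    # uuid-suffixed name built in runTest
    'pub': '["sample__1"]',                     # publication list as a JSON-encoded string
    'pubtable': '["public.some_table"]',        # result of subscription_utils.get_tables()
}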
@ -0,0 +1,93 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as subscription_utils
|
||||
|
||||
|
||||
class SubscriptionDeleteTestCase(BaseTestGenerator):
|
||||
"""This class will delete subscription."""
|
||||
scenarios = utils.generate_scenarios('delete_subscription',
|
||||
subscription_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to delete subscription.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to delete "
|
||||
"subscription.")
|
||||
self.subscription_name = "test_subscription_delete_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
|
||||
self.subscription_id = subscription_utils.create_subscription(
|
||||
self.server,
|
||||
self.db_name,
|
||||
self.subscription_name)
|
||||
|
||||
def delete_subscription(self):
|
||||
return self.tester.delete(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' +
|
||||
str(self.server_id) + '/' + str(self.db_id) +
|
||||
'/' + str(self.subscription_id),
|
||||
follow_redirects=True)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will delete subscription"""
|
||||
subscription_response = subscription_utils.verify_subscription(
|
||||
self.server,
|
||||
self.db_name,
|
||||
self.subscription_name)
|
||||
if not subscription_response:
|
||||
raise Exception("Could not find the subscription to delete.")
|
||||
|
||||
if self.is_positive_test:
|
||||
if hasattr(self, "invalid_subscription_id"):
|
||||
self.subscription_id = 9999
|
||||
response = self.delete_subscription()
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.delete_subscription()
|
||||
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
# Delete the subscription and disconnect the database
|
||||
subscription_utils.delete_subscription(self.server, self.db_name,
|
||||
self.subscription_name)
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,106 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
import json
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as subscription_utils
|
||||
|
||||
|
||||
class SubscriptionDeleteTestCases(BaseTestGenerator):
|
||||
"""This class will delete subscription."""
|
||||
|
||||
scenarios = utils.generate_scenarios('delete_multiple_subscription',
|
||||
subscription_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to delete subscription.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to "
|
||||
"delete subscription.")
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.subscription_name = "test_subscription_delete_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.subscription_name_1 = "test_subscription_delete_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.subscription_ids = [
|
||||
subscription_utils.create_subscription(self.server, self.db_name,
|
||||
self.subscription_name),
|
||||
subscription_utils.create_subscription(self.server, self.db_name,
|
||||
self.subscription_name_1),
|
||||
]
|
||||
|
||||
def delete_multiple_subscription(self, data):
|
||||
return self.tester.delete(
|
||||
"{0}{1}/{2}/{3}/".format(self.url, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id
|
||||
),
|
||||
follow_redirects=True,
|
||||
data=json.dumps(data),
|
||||
content_type='html/json'
|
||||
)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will delete subscription."""
|
||||
subscription_response = subscription_utils.verify_subscription(
|
||||
self.server,
|
||||
self.db_name,
|
||||
self.subscription_name)
|
||||
if not subscription_response:
|
||||
raise Exception("Could not find the subscription to delete.")
|
||||
|
||||
subscription_response = subscription_utils.verify_subscription(
|
||||
self.server,
|
||||
self.db_name,
|
||||
self.subscription_name_1)
|
||||
if not subscription_response:
|
||||
raise Exception("Could not find the subscription to delete.")
|
||||
|
||||
data = {'ids': self.subscription_ids}
|
||||
if self.is_positive_test:
|
||||
response = self.delete_multiple_subscription(data)
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,99 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as subscription_utils
|
||||
|
||||
|
||||
class SubscriptionGetTestCase(BaseTestGenerator):
|
||||
"""This class will fetch the subscription under table node."""
|
||||
scenarios = utils.generate_scenarios('get_subscription',
|
||||
subscription_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception("Could not connect to database to "
|
||||
"delete subscription.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to delete "
|
||||
"subscription.")
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.subscription_name = "test_subscription_delete_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.subscription_id = subscription_utils.create_subscription(
|
||||
self.server, self.db_name,
|
||||
self.subscription_name)
|
||||
|
||||
def get_subscription(self):
|
||||
return self.tester.get(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' + str(
|
||||
self.server_id) + '/' +
|
||||
str(self.db_id) + '/' + str(self.subscription_id),
|
||||
content_type='html/json')
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch the subscription."""
|
||||
|
||||
if self.is_positive_test:
|
||||
if hasattr(self, "database_nodes"):
|
||||
self.subscription_id = ''
|
||||
response = self.get_subscription()
|
||||
else:
|
||||
response = self.get_subscription()
|
||||
else:
|
||||
if hasattr(self, "database_nodes"):
|
||||
self.subscription_id = ''
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.get_subscription()
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.get_subscription()
|
||||
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
subscription_utils.delete_subscription(self.server, self.db_name,
|
||||
self.subscription_name)
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,112 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as subscription_utils
|
||||
|
||||
|
||||
class SubscriptionUpdateTestCase(BaseTestGenerator):
|
||||
"""This class will update the subscription."""
|
||||
scenarios = utils.generate_scenarios('update_subscription',
|
||||
subscription_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to delete subscription.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to delete "
|
||||
"subscription.")
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.subscription_name = "test_subscription_update_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.subscription_id = \
|
||||
subscription_utils.create_subscription(self.server, self.db_name,
|
||||
self.subscription_name)
|
||||
|
||||
def update_subscription(self, data):
|
||||
return self.tester.put(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' +
|
||||
str(self.server_id) + '/' + str(
|
||||
self.db_id) +
|
||||
'/' + str(self.subscription_id),
|
||||
data=json.dumps(data),
|
||||
follow_redirects=True)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will update the subscription."""
|
||||
subscription_name = \
|
||||
subscription_utils.verify_subscription(self.server, self.db_name,
|
||||
self.subscription_name)
|
||||
if hasattr(self, "update_name"):
|
||||
self.subscription_name = "test_subscription_update_2_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.test_data['name'] = self.subscription_name
|
||||
else:
|
||||
self.test_data['name'] = self.subscription_name
|
||||
self.test_data['id'] = self.subscription_id
|
||||
|
||||
if not subscription_name:
|
||||
raise Exception("Could not find the subscription to update.")
|
||||
|
||||
if self.is_positive_test:
|
||||
if hasattr(self, "wrong_subscription_id"):
|
||||
self.subscription_id = 9999
|
||||
if hasattr(self, "plid_none"):
|
||||
self.subscription_id = ''
|
||||
response = self.update_subscription(self.test_data)
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
if hasattr(self, "wrong_subscription_id"):
|
||||
self.subscription_id = 9999
|
||||
response = self.update_subscription(self.test_data)
|
||||
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
|
||||
# Disconnect the database
|
||||
subscription_utils.delete_subscription(self.server, self.db_name,
|
||||
self.subscription_name)
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,88 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as subscription_utils
|
||||
|
||||
|
||||
class SubscriptionGetTestCase(BaseTestGenerator):
|
||||
"""This class will fetch the subscription under table node."""
|
||||
scenarios = utils.generate_scenarios('sql_subscription',
|
||||
subscription_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
self.server_version = schema_info["server_version"]
|
||||
if self.server_version < 99999:
|
||||
self.skipTest(
|
||||
"Logical replication is not supported "
|
||||
"for server version less than 10"
|
||||
|
||||
)
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to delete subscription.")
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to delete "
|
||||
"subscription.")
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.subscription_name = "test_subscription_sql_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.subscription_id = \
|
||||
subscription_utils.create_subscription(self.server, self.db_name,
|
||||
self.subscription_name)
|
||||
|
||||
def get_sql(self):
|
||||
return self.tester.get(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' + str(
|
||||
self.server_id) + '/' +
|
||||
str(self.db_id) + '/' + str(self.subscription_id),
|
||||
content_type='html/json')
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch the subscription under table node."""
|
||||
|
||||
if self.is_positive_test:
|
||||
response = self.get_sql()
|
||||
else:
|
||||
with patch(self.mock_data["function_name"],
|
||||
return_value=eval(self.mock_data["return_value"])):
|
||||
response = self.get_sql()
|
||||
|
||||
self.assertEqual(response.status_code,
|
||||
self.expected_data["status_code"])
|
||||
|
||||
def tearDown(self):
|
||||
subscription_utils.delete_subscription(self.server, self.db_name,
|
||||
self.subscription_name)
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
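All of the request helpers above (create, get, put, delete) build the same endpoint path by hand from SERVER_GROUP, the server id, the database id and, optionally, the subscription id. A small sketch of that shared pattern (a hypothetical helper, not part of the patch):

# Hypothetical helper mirroring the URL scheme the tests construct inline:
#   <base>/<server group>/<server id>/<database id>/[<subscription id>]
def build_node_url(base, server_group, server_id, db_id, subscription_id=None):
    url = '{0}{1}/{2}/{3}/'.format(base, server_group, server_id, db_id)
    if subscription_id is not None:
        url = '{0}{1}'.format(url, subscription_id)
    return url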
@ -0,0 +1,157 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
|
||||
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
|
||||
with open(CURRENT_PATH + "/subscription_test_data.json") as data_file:
|
||||
test_cases = json.load(data_file)
|
||||
|
||||
|
||||
def get_tables(self):
|
||||
tables = self.tester.get(
|
||||
'/browser/subscription/get_tables/' + str(
|
||||
utils.SERVER_GROUP) + '/' + str(
|
||||
self.server_id) + '/' +
|
||||
str(self.db_id) + '/',
|
||||
content_type='html/json')
|
||||
return json.dumps([tables.json['data'][1]['value']])
|
||||
|
||||
|
||||
def create_subscription_api(self):
|
||||
return self.tester.post(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' +
|
||||
str(self.server_id) + '/' + str(
|
||||
self.db_id) + '/',
|
||||
data=json.dumps(self.test_data),
|
||||
content_type='html/json')
|
||||
|
||||
|
||||
def create_subscription(server, db_name, subscription_name):
|
||||
"""
|
||||
This function creates a subscription.
|
||||
:param server: server details
|
||||
:type server: dict
|
||||
:param db_name: database name
|
||||
:type db_name: str
|
||||
:param subscription_name: subscription name
|
||||
:type subscription_name: str
|
||||
:return subscription_id: subscription id
|
||||
:rtype: int
|
||||
"""
|
||||
try:
|
||||
connection = utils.get_db_connection(db_name,
|
||||
server['username'],
|
||||
server['db_password'],
|
||||
server['host'],
|
||||
server['port'],
|
||||
server['sslmode'])
|
||||
old_isolation_level = connection.isolation_level
|
||||
connection.set_isolation_level(0)
|
||||
pg_cursor = connection.cursor()
|
||||
query = """CREATE SUBSCRIPTION "%s" """ \
|
||||
"""CONNECTION 'host=192.168.1.50 port=5432 user=foo """ \
|
||||
"""dbname=foodb' """ \
|
||||
"""PUBLICATION insert_only WITH (create_slot = false, """ \
|
||||
"""enabled = false, slot_name=NONE, connect=false);""" % (
|
||||
subscription_name)
|
||||
pg_cursor.execute(query)
|
||||
connection.set_isolation_level(old_isolation_level)
|
||||
connection.commit()
|
||||
# Get role oid of newly added subscription
|
||||
pg_cursor.execute("select oid from pg_subscription sub where "
|
||||
"sub.subname='%s'" %
|
||||
subscription_name)
|
||||
subscription = pg_cursor.fetchone()
|
||||
subscription_id = ''
|
||||
if subscription:
|
||||
subscription_id = subscription[0]
|
||||
connection.close()
|
||||
return subscription_id
|
||||
except Exception:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
raise
|
||||
|
||||
|
||||
def verify_subscription(server, db_name, subscription_name):
|
||||
"""
|
||||
This function verifies whether the subscription exists in the database.
|
||||
:param server: server details
|
||||
:type server: dict
|
||||
:param db_name: database name
|
||||
:type db_name: str
|
||||
:param subscription_name: subscription name
|
||||
:type subscription_name: str
|
||||
:return subscription: subscription record from database
|
||||
:rtype: tuple
|
||||
"""
|
||||
try:
|
||||
connection = utils.get_db_connection(db_name,
|
||||
server['username'],
|
||||
server['db_password'],
|
||||
server['host'],
|
||||
server['port'],
|
||||
server['sslmode'])
|
||||
pg_cursor = connection.cursor()
|
||||
pg_cursor.execute("select * from pg_subscription sub "
|
||||
"where sub.subname='%s'" %
|
||||
subscription_name)
|
||||
subscription = pg_cursor.fetchone()
|
||||
connection.close()
|
||||
return subscription
|
||||
except Exception:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
raise
|
||||
|
||||
|
||||
def delete_subscription(server, db_name, subscription_name):
|
||||
"""
|
||||
This function is used to delete an existing subscription.
|
||||
|
||||
:param db_name: database name
|
||||
:type db_name: str
|
||||
:param server: server details
|
||||
:type server: dict
|
||||
:param subscription_name: subscription name
|
||||
:type subscription_name: str
|
||||
:return: None
|
||||
"""
|
||||
|
||||
try:
|
||||
connection = utils.get_db_connection(db_name,
|
||||
server['username'],
|
||||
server['db_password'],
|
||||
server['host'],
|
||||
server['port'],
|
||||
server['sslmode'])
|
||||
pg_cursor = connection.cursor()
|
||||
|
||||
pg_cursor.execute("select * from pg_subscription sub where "
|
||||
"sub.subname='%s'" %
|
||||
subscription_name)
|
||||
subscription_count = pg_cursor.fetchone()
|
||||
if subscription_count:
|
||||
old_isolation_level = connection.isolation_level
|
||||
connection.set_isolation_level(0)
|
||||
pg_cursor = connection.cursor()
|
||||
query = "DROP subscription %s" % \
|
||||
(subscription_name)
|
||||
pg_cursor.execute(query)
|
||||
connection.set_isolation_level(old_isolation_level)
|
||||
connection.commit()
|
||||
connection.close()
|
||||
except Exception:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
raise
|
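Taken together, the helpers in this utils module follow the same life cycle in every test class above: create the subscription directly over a libpq connection in setUp, verify it exists before exercising the endpoint, and drop it again in tearDown. A condensed sketch of that flow (the subscription name and the exercise function are placeholders):

# Sketch of the create -> verify -> delete life cycle the test classes rely on.
from . import utils as subscription_utils


def exercise_subscription(server, db_name):
    name = 'test_subscription_demo'
    sub_id = subscription_utils.create_subscription(server, db_name, name)
    try:
        # verify_subscription returns the pg_subscription row, or None if missing
        assert subscription_utils.verify_subscription(server, db_name, name)
    finally:
        subscription_utils.delete_subscription(server, db_name, name)
    return sub_id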
@ -0,0 +1,79 @@
|
||||
SELECT DISTINCT dep.deptype, dep.classid, cl.relkind, ad.adbin, ad.adsrc,
|
||||
CASE WHEN cl.relkind IS NOT NULL THEN CASE WHEN cl.relkind = 'r' THEN cl.relkind || COALESCE(dep.objsubid::text, '') ELSE cl.relkind END
|
||||
WHEN tg.oid IS NOT NULL THEN 'Tr'::text
|
||||
WHEN ty.oid IS NOT NULL THEN CASE WHEN ty.typtype = 'd' THEN 'd'::text ELSE 'Ty'::text END
|
||||
WHEN ns.oid IS NOT NULL THEN 'n'::text
|
||||
WHEN pr.oid IS NOT NULL AND (prtyp.typname = 'trigger' OR prtyp.typname = 'event_trigger') THEN 'Pt'::text
|
||||
WHEN pr.oid IS NOT NULL THEN 'Pf'::text
|
||||
WHEN la.oid IS NOT NULL THEN 'l'::text
|
||||
WHEN rw.oid IS NOT NULL THEN 'Rl'::text
|
||||
WHEN co.oid IS NOT NULL THEN CASE WHEN co.contypid > 0 THEN 'Cd' ELSE 'C'::text || contype END
|
||||
WHEN ad.oid IS NOT NULL THEN 'A'::text
|
||||
WHEN fs.oid IS NOT NULL THEN 'Fs'::text
|
||||
WHEN fdw.oid IS NOT NULL THEN 'Fw'::text
|
||||
WHEN evt.oid IS NOT NULL THEN 'Et'::text
|
||||
WHEN col.oid IS NOT NULL THEN 'Co'::text
|
||||
WHEN ftsc.oid IS NOT NULL THEN 'Fc'::text
|
||||
WHEN ftsp.oid IS NOT NULL THEN 'Fp'::text
|
||||
WHEN ftsd.oid IS NOT NULL THEN 'Fd'::text
|
||||
WHEN ftst.oid IS NOT NULL THEN 'Ft'::text
|
||||
WHEN ext.oid IS NOT NULL THEN 'Ex'::text
|
||||
WHEN pl.oid IS NOT NULL THEN 'Rs'::text
|
||||
WHEN pub_rel.oid IS NOT NULL THEN 'r'::text
|
||||
ELSE ''
|
||||
END AS type,
|
||||
COALESCE(coc.relname, clrw.relname) AS ownertable,
|
||||
CASE WHEN cl.relname IS NOT NULL AND att.attname IS NOT NULL THEN cl.relname || COALESCE('.' || att.attname, '')
|
||||
ELSE COALESCE(cl.relname, co.conname, pr.proname, tg.tgname, ty.typname, la.lanname, rw.rulename, ns.nspname,
|
||||
fs.srvname, fdw.fdwname, evt.evtname, col.collname, ftsc.cfgname, ftsd.dictname, ftsp.prsname,
|
||||
ftst.tmplname, ext.extname, pl.polname, quote_ident(pubns.nspname)||'.'||quote_ident(pubcl.relname))
|
||||
END AS refname,
|
||||
COALESCE(nsc.nspname, nso.nspname, nsp.nspname, nst.nspname, nsrw.nspname, colns.nspname, ftscns.nspname,
|
||||
ftsdns.nspname, ftspns.nspname, ftstns.nspname) AS nspname,
|
||||
CASE WHEN inhits.inhparent IS NOT NULL THEN '1' ELSE '0' END AS is_inherits,
|
||||
CASE WHEN inhed.inhparent IS NOT NULL THEN '1' ELSE '0' END AS is_inherited
|
||||
FROM pg_depend dep
|
||||
LEFT JOIN pg_class cl ON dep.objid=cl.oid
|
||||
LEFT JOIN pg_attribute att ON dep.objid=att.attrelid AND dep.objsubid=att.attnum
|
||||
LEFT JOIN pg_namespace nsc ON cl.relnamespace=nsc.oid
|
||||
LEFT JOIN pg_proc pr ON dep.objid=pr.oid
|
||||
LEFT JOIN pg_namespace nsp ON pr.pronamespace=nsp.oid
|
||||
LEFT JOIN pg_trigger tg ON dep.objid=tg.oid
|
||||
LEFT JOIN pg_type ty ON dep.objid=ty.oid
|
||||
LEFT JOIN pg_namespace nst ON ty.typnamespace=nst.oid
|
||||
LEFT JOIN pg_constraint co ON dep.objid=co.oid
|
||||
LEFT JOIN pg_class coc ON co.conrelid=coc.oid
|
||||
LEFT JOIN pg_namespace nso ON co.connamespace=nso.oid
|
||||
LEFT JOIN pg_rewrite rw ON dep.objid=rw.oid
|
||||
LEFT JOIN pg_class clrw ON clrw.oid=rw.ev_class
|
||||
LEFT JOIN pg_namespace nsrw ON clrw.relnamespace=nsrw.oid
|
||||
LEFT JOIN pg_language la ON dep.objid=la.oid
|
||||
LEFT JOIN pg_namespace ns ON dep.objid=ns.oid
|
||||
LEFT JOIN pg_attrdef ad ON ad.oid=dep.objid
|
||||
LEFT JOIN pg_foreign_server fs ON fs.oid=dep.objid
|
||||
LEFT JOIN pg_foreign_data_wrapper fdw ON fdw.oid=dep.objid
|
||||
LEFT JOIN pg_type prtyp ON prtyp.oid = pr.prorettype
|
||||
LEFT JOIN pg_inherits inhits ON (inhits.inhrelid=dep.objid)
|
||||
LEFT JOIN pg_inherits inhed ON (inhed.inhparent=dep.objid)
|
||||
LEFT JOIN pg_event_trigger evt ON evt.oid=dep.objid
|
||||
LEFT JOIN pg_collation col ON col.oid=dep.objid
|
||||
LEFT JOIN pg_namespace colns ON col.collnamespace=colns.oid
|
||||
LEFT JOIN pg_ts_config ftsc ON ftsc.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftscns ON ftsc.cfgnamespace=ftscns.oid
|
||||
LEFT JOIN pg_ts_dict ftsd ON ftsd.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftsdns ON ftsd.dictnamespace=ftsdns.oid
|
||||
LEFT JOIN pg_ts_parser ftsp ON ftsp.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftspns ON ftsp.prsnamespace=ftspns.oid
|
||||
LEFT JOIN pg_ts_template ftst ON ftst.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftstns ON ftst.tmplnamespace=ftstns.oid
|
||||
LEFT JOIN pg_extension ext ON ext.oid=dep.objid
|
||||
LEFT JOIN pg_policy pl ON pl.oid=dep.objid
|
||||
LEFT JOIN pg_publication_rel pub_rel ON pub_rel.oid = dep.objid
|
||||
LEFT JOIN pg_class pubcl ON pubcl.oid = pub_rel.prrelid
|
||||
LEFT JOIN pg_namespace pubns ON pubns.oid=pubcl.relnamespace
|
||||
{{where_clause}} AND
|
||||
classid IN ( SELECT oid FROM pg_class WHERE relname IN
|
||||
('pg_class', 'pg_constraint', 'pg_conversion', 'pg_language', 'pg_proc', 'pg_rewrite', 'pg_namespace',
|
||||
'pg_trigger', 'pg_type', 'pg_attrdef', 'pg_event_trigger', 'pg_foreign_server', 'pg_foreign_data_wrapper',
|
||||
'pg_collation', 'pg_ts_config', 'pg_ts_dict', 'pg_ts_parser', 'pg_ts_template', 'pg_extension', 'pg_policy', 'pg_subscription', 'pg_publication_rel'))
|
||||
ORDER BY classid, cl.relkind
|
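This dependencies template is presumably consumed the same way the other node templates are: rendered with Jinja and executed through the driver connection (the same Connection.execute_dict call the tests above mock). A hedged sketch of that pattern; the template path and the where-clause filter on dep.refobjid are assumptions for illustration only:

# Illustrative only: rendering and running the dependencies template for one object OID.
from flask import render_template


def fetch_dependencies(conn, template_dir, object_id):
    # where_clause narrows pg_depend to the selected object; filtering on
    # dep.refobjid is an assumption about what the real callers pass in.
    sql = render_template(
        template_dir + '/dependencies.sql',
        where_clause='WHERE dep.refobjid = {0}::oid'.format(object_id))
    status, result = conn.execute_dict(sql)
    if not status:
        raise RuntimeError(result)
    return result['rows']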
@ -19,13 +19,14 @@ SELECT DISTINCT dep.deptype, dep.classid, cl.relkind, ad.adbin, ad.adsrc,
|
||||
WHEN ftst.oid IS NOT NULL THEN 'Ft'::text
|
||||
WHEN ext.oid IS NOT NULL THEN 'Ex'::text
|
||||
WHEN pl.oid IS NOT NULL THEN 'Rs'::text
|
||||
WHEN pub_rel.oid IS NOT NULL THEN 'r'::text
|
||||
ELSE ''
|
||||
END AS type,
|
||||
COALESCE(coc.relname, clrw.relname) AS ownertable,
|
||||
CASE WHEN cl.relname IS NOT NULL AND att.attname IS NOT NULL THEN cl.relname || COALESCE('.' || att.attname, '')
|
||||
ELSE COALESCE(cl.relname, co.conname, pr.proname, tg.tgname, ty.typname, la.lanname, rw.rulename, ns.nspname,
|
||||
fs.srvname, fdw.fdwname, evt.evtname, col.collname, ftsc.cfgname, ftsd.dictname, ftsp.prsname,
|
||||
ftst.tmplname, ext.extname, pl.polname)
|
||||
ftst.tmplname, ext.extname, pl.polname, quote_ident(pubns.nspname)||'.'||quote_ident(pubcl.relname))
|
||||
END AS refname,
|
||||
COALESCE(nsc.nspname, nso.nspname, nsp.nspname, nst.nspname, nsrw.nspname, colns.nspname, ftscns.nspname,
|
||||
ftsdns.nspname, ftspns.nspname, ftstns.nspname) AS nspname,
|
||||
@ -67,9 +68,12 @@ LEFT JOIN pg_ts_template ftst ON ftst.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftstns ON ftst.tmplnamespace=ftstns.oid
|
||||
LEFT JOIN pg_extension ext ON ext.oid=dep.objid
|
||||
LEFT JOIN pg_policy pl ON pl.oid=dep.objid
|
||||
LEFT JOIN pg_publication_rel pub_rel ON pub_rel.oid = dep.objid
|
||||
LEFT JOIN pg_class pubcl ON pubcl.oid = pub_rel.prrelid
|
||||
LEFT JOIN pg_namespace pubns ON pubns.oid=pubcl.relnamespace
|
||||
{{where_clause}} AND
|
||||
classid IN ( SELECT oid FROM pg_class WHERE relname IN
|
||||
('pg_class', 'pg_constraint', 'pg_conversion', 'pg_language', 'pg_proc', 'pg_rewrite', 'pg_namespace',
|
||||
'pg_trigger', 'pg_type', 'pg_attrdef', 'pg_event_trigger', 'pg_foreign_server', 'pg_foreign_data_wrapper',
|
||||
'pg_collation', 'pg_ts_config', 'pg_ts_dict', 'pg_ts_parser', 'pg_ts_template', 'pg_extension', 'pg_policy'))
|
||||
'pg_collation', 'pg_ts_config', 'pg_ts_dict', 'pg_ts_parser', 'pg_ts_template', 'pg_extension', 'pg_policy', 'pg_subscription', 'pg_publication_rel'))
|
||||
ORDER BY classid, cl.relkind
|
||||
|
@ -19,13 +19,14 @@ SELECT DISTINCT dep.deptype, dep.classid, cl.relkind, ad.adbin, pg_get_expr(ad.a
|
||||
WHEN ftst.oid IS NOT NULL THEN 'Ft'::text
|
||||
WHEN ext.oid IS NOT NULL THEN 'Ex'::text
|
||||
WHEN pl.oid IS NOT NULL THEN 'Rs'::text
|
||||
WHEN pub_rel.oid IS NOT NULL THEN 'r'::text
|
||||
ELSE ''
|
||||
END AS type,
|
||||
COALESCE(coc.relname, clrw.relname) AS ownertable,
|
||||
CASE WHEN cl.relname IS NOT NULL AND att.attname IS NOT NULL THEN cl.relname || COALESCE('.' || att.attname, '')
|
||||
ELSE COALESCE(cl.relname, co.conname, pr.proname, tg.tgname, ty.typname, la.lanname, rw.rulename, ns.nspname,
|
||||
fs.srvname, fdw.fdwname, evt.evtname, col.collname, ftsc.cfgname, ftsd.dictname, ftsp.prsname,
|
||||
ftst.tmplname, ext.extname, pl.polname)
|
||||
ftst.tmplname, ext.extname, pl.polname, quote_ident(pubns.nspname)||'.'||quote_ident(pubcl.relname))
|
||||
END AS refname,
|
||||
COALESCE(nsc.nspname, nso.nspname, nsp.nspname, nst.nspname, nsrw.nspname, colns.nspname, ftscns.nspname,
|
||||
ftsdns.nspname, ftspns.nspname, ftstns.nspname) AS nspname,
|
||||
@ -67,9 +68,12 @@ LEFT JOIN pg_ts_template ftst ON ftst.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftstns ON ftst.tmplnamespace=ftstns.oid
|
||||
LEFT JOIN pg_extension ext ON ext.oid=dep.objid
|
||||
LEFT JOIN pg_policy pl ON pl.oid=dep.objid
|
||||
LEFT JOIN pg_publication_rel pub_rel ON pub_rel.oid = dep.objid
|
||||
LEFT JOIN pg_class pubcl ON pubcl.oid = pub_rel.prrelid
|
||||
LEFT JOIN pg_namespace pubns ON pubns.oid=pubcl.relnamespace
|
||||
{{where_clause}} AND
|
||||
classid IN ( SELECT oid FROM pg_class WHERE relname IN
|
||||
('pg_class', 'pg_constraint', 'pg_conversion', 'pg_language', 'pg_proc', 'pg_rewrite', 'pg_namespace',
|
||||
'pg_trigger', 'pg_type', 'pg_attrdef', 'pg_event_trigger', 'pg_foreign_server', 'pg_foreign_data_wrapper',
|
||||
'pg_collation', 'pg_ts_config', 'pg_ts_dict', 'pg_ts_parser', 'pg_ts_template', 'pg_extension', 'pg_policy'))
|
||||
'pg_collation', 'pg_ts_config', 'pg_ts_dict', 'pg_ts_parser', 'pg_ts_template', 'pg_extension', 'pg_policy', 'pg_subscription', 'pg_publication_rel'))
|
||||
ORDER BY classid, cl.relkind
|
||||
|
@ -0,0 +1,82 @@
|
||||
SELECT DISTINCT dep.deptype, dep.classid, cl.relkind, ad.adbin, ad.adsrc,
|
||||
CASE WHEN cl.relkind IS NOT NULL THEN CASE WHEN cl.relkind = 'r' THEN cl.relkind || COALESCE(dep.objsubid::text, '') ELSE cl.relkind END
|
||||
WHEN tg.oid IS NOT NULL THEN 'Tr'::text
|
||||
WHEN ty.oid IS NOT NULL THEN CASE WHEN ty.typtype = 'd' THEN 'd'::text ELSE 'Ty'::text END
|
||||
WHEN ns.oid IS NOT NULL THEN CASE WHEN ns.nspparent != 0 THEN 'Pa'::text ELSE 'n'::text END
|
||||
WHEN pr.oid IS NOT NULL AND (prtyp.typname = 'trigger' OR prtyp.typname = 'event_trigger') THEN 'Pt'::text
|
||||
WHEN pr.oid IS NOT NULL THEN 'Pf'::text
|
||||
WHEN la.oid IS NOT NULL THEN 'l'::text
|
||||
WHEN rw.oid IS NOT NULL THEN 'Rl'::text
|
||||
WHEN co.oid IS NOT NULL THEN CASE WHEN co.contypid > 0 THEN 'Cd' ELSE 'C'::text || contype END
|
||||
WHEN ad.oid IS NOT NULL THEN 'A'::text
|
||||
WHEN fs.oid IS NOT NULL THEN 'Fs'::text
|
||||
WHEN fdw.oid IS NOT NULL THEN 'Fw'::text
|
||||
WHEN evt.oid IS NOT NULL THEN 'Et'::text
|
||||
WHEN col.oid IS NOT NULL THEN 'Co'::text
|
||||
WHEN ftsc.oid IS NOT NULL THEN 'Fc'::text
|
||||
WHEN ftsp.oid IS NOT NULL THEN 'Fp'::text
|
||||
WHEN ftsd.oid IS NOT NULL THEN 'Fd'::text
|
||||
WHEN ftst.oid IS NOT NULL THEN 'Ft'::text
|
||||
WHEN ext.oid IS NOT NULL THEN 'Ex'::text
|
||||
WHEN syn.oid IS NOT NULL THEN 'Sy'::text
|
||||
WHEN pl.oid IS NOT NULL THEN 'Rs'::text
|
||||
WHEN pub_rel.oid IS NOT NULL THEN 'r'::text
|
||||
ELSE ''
|
||||
END AS type,
|
||||
COALESCE(coc.relname, clrw.relname) AS ownertable,
|
||||
CASE WHEN cl.relname IS NOT NULL AND att.attname IS NOT NULL THEN cl.relname || COALESCE('.' || att.attname, '')
|
||||
ELSE COALESCE(cl.relname, co.conname, pr.proname, tg.tgname, ty.typname, la.lanname, rw.rulename, ns.nspname,
|
||||
fs.srvname, fdw.fdwname, evt.evtname, col.collname, ftsc.cfgname, ftsd.dictname, ftsp.prsname,
|
||||
ftst.tmplname, ext.extname, syn.synname, pl.polname, quote_ident(pubns.nspname)||'.'||quote_ident(pubcl.relname))
|
||||
END AS refname,
|
||||
COALESCE(nsc.nspname, nso.nspname, nsp.nspname, nst.nspname, nsrw.nspname, colns.nspname, ftscns.nspname,
|
||||
ftsdns.nspname, ftspns.nspname, ftstns.nspname, synns.nspname) AS nspname,
|
||||
CASE WHEN inhits.inhparent IS NOT NULL THEN '1' ELSE '0' END AS is_inherits,
|
||||
CASE WHEN inhed.inhparent IS NOT NULL THEN '1' ELSE '0' END AS is_inherited
|
||||
FROM pg_depend dep
|
||||
LEFT JOIN pg_class cl ON dep.objid=cl.oid
|
||||
LEFT JOIN pg_attribute att ON dep.objid=att.attrelid AND dep.objsubid=att.attnum
|
||||
LEFT JOIN pg_namespace nsc ON cl.relnamespace=nsc.oid
|
||||
LEFT JOIN pg_proc pr ON dep.objid=pr.oid
|
||||
LEFT JOIN pg_namespace nsp ON pr.pronamespace=nsp.oid
|
||||
LEFT JOIN pg_trigger tg ON dep.objid=tg.oid
|
||||
LEFT JOIN pg_type ty ON dep.objid=ty.oid
|
||||
LEFT JOIN pg_namespace nst ON ty.typnamespace=nst.oid
|
||||
LEFT JOIN pg_constraint co ON dep.objid=co.oid
|
||||
LEFT JOIN pg_class coc ON co.conrelid=coc.oid
|
||||
LEFT JOIN pg_namespace nso ON co.connamespace=nso.oid
|
||||
LEFT JOIN pg_rewrite rw ON dep.objid=rw.oid
|
||||
LEFT JOIN pg_class clrw ON clrw.oid=rw.ev_class
|
||||
LEFT JOIN pg_namespace nsrw ON clrw.relnamespace=nsrw.oid
|
||||
LEFT JOIN pg_language la ON dep.objid=la.oid
|
||||
LEFT JOIN pg_namespace ns ON dep.objid=ns.oid
|
||||
LEFT JOIN pg_attrdef ad ON ad.oid=dep.objid
|
||||
LEFT JOIN pg_foreign_server fs ON fs.oid=dep.objid
|
||||
LEFT JOIN pg_foreign_data_wrapper fdw ON fdw.oid=dep.objid
|
||||
LEFT JOIN pg_type prtyp ON prtyp.oid = pr.prorettype
|
||||
LEFT JOIN pg_inherits inhits ON (inhits.inhrelid=dep.objid)
|
||||
LEFT JOIN pg_inherits inhed ON (inhed.inhparent=dep.objid)
|
||||
LEFT JOIN pg_event_trigger evt ON evt.oid=dep.objid
|
||||
LEFT JOIN pg_collation col ON col.oid=dep.objid
|
||||
LEFT JOIN pg_namespace colns ON col.collnamespace=colns.oid
|
||||
LEFT JOIN pg_ts_config ftsc ON ftsc.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftscns ON ftsc.cfgnamespace=ftscns.oid
|
||||
LEFT JOIN pg_ts_dict ftsd ON ftsd.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftsdns ON ftsd.dictnamespace=ftsdns.oid
|
||||
LEFT JOIN pg_ts_parser ftsp ON ftsp.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftspns ON ftsp.prsnamespace=ftspns.oid
|
||||
LEFT JOIN pg_ts_template ftst ON ftst.oid=dep.objid
|
||||
LEFT JOIN pg_namespace ftstns ON ftst.tmplnamespace=ftstns.oid
|
||||
LEFT JOIN pg_extension ext ON ext.oid=dep.objid
|
||||
LEFT JOIN pg_synonym syn ON syn.oid=dep.objid
|
||||
LEFT JOIN pg_namespace synns ON syn.synnamespace=synns.oid
|
||||
LEFT JOIN pg_policy pl ON pl.oid=dep.objid
|
||||
LEFT JOIN pg_publication_rel pub_rel ON pub_rel.oid = dep.objid
|
||||
LEFT JOIN pg_class pubcl ON pubcl.oid = pub_rel.prrelid
|
||||
LEFT JOIN pg_namespace pubns ON pubns.oid=pubcl.relnamespace
|
||||
{{where_clause}} AND
|
||||
classid IN ( SELECT oid FROM pg_class WHERE relname IN
|
||||
('pg_class', 'pg_constraint', 'pg_conversion', 'pg_language', 'pg_proc', 'pg_rewrite', 'pg_namespace',
|
||||
'pg_trigger', 'pg_type', 'pg_attrdef', 'pg_event_trigger', 'pg_foreign_server', 'pg_foreign_data_wrapper',
|
||||
'pg_collation', 'pg_ts_config', 'pg_ts_dict', 'pg_ts_parser', 'pg_ts_template', 'pg_extension', 'pg_policy', 'pg_subscription', 'pg_publication_rel'))
|
||||
ORDER BY classid, cl.relkind
|
@ -20,13 +20,14 @@ SELECT DISTINCT dep.deptype, dep.classid, cl.relkind, ad.adbin, ad.adsrc,
|
||||
WHEN ext.oid IS NOT NULL THEN 'Ex'::text
|
||||
WHEN syn.oid IS NOT NULL THEN 'Sy'::text
|
||||
WHEN pl.oid IS NOT NULL THEN 'Rs'::text
|
||||
WHEN pub_rel.oid IS NOT NULL THEN 'r'::text
|
||||
ELSE ''
|
||||
END AS type,
|
||||
COALESCE(coc.relname, clrw.relname) AS ownertable,
|
||||
CASE WHEN cl.relname IS NOT NULL AND att.attname IS NOT NULL THEN cl.relname || COALESCE('.' || att.attname, '')
|
||||
ELSE COALESCE(cl.relname, co.conname, pr.proname, tg.tgname, ty.typname, la.lanname, rw.rulename, ns.nspname,
|
||||
fs.srvname, fdw.fdwname, evt.evtname, col.collname, ftsc.cfgname, ftsd.dictname, ftsp.prsname,
|
||||
ftst.tmplname, ext.extname, syn.synname, pl.polname)
|
||||
ftst.tmplname, ext.extname, syn.synname, pl.polname, quote_ident(pubns.nspname)||'.'||quote_ident(pubcl.relname))
|
||||
END AS refname,
|
||||
COALESCE(nsc.nspname, nso.nspname, nsp.nspname, nst.nspname, nsrw.nspname, colns.nspname, ftscns.nspname,
|
||||
ftsdns.nspname, ftspns.nspname, ftstns.nspname, synns.nspname) AS nspname,
|
||||
@ -70,10 +71,12 @@ LEFT JOIN pg_extension ext ON ext.oid=dep.objid
|
||||
LEFT JOIN pg_synonym syn ON syn.oid=dep.objid
|
||||
LEFT JOIN pg_namespace synns ON syn.synnamespace=synns.oid
|
||||
LEFT JOIN pg_policy pl ON pl.oid=dep.objid
|
||||
LEFT JOIN pg_publication_rel pub_rel ON pub_rel.oid = dep.objid
|
||||
LEFT JOIN pg_class pubcl ON pubcl.oid = pub_rel.prrelid
|
||||
LEFT JOIN pg_namespace pubns ON pubns.oid=pubcl.relnamespace
|
||||
{{where_clause}} AND
|
||||
classid IN ( SELECT oid FROM pg_class WHERE relname IN
|
||||
('pg_class', 'pg_constraint', 'pg_conversion', 'pg_language', 'pg_proc', 'pg_rewrite', 'pg_namespace',
|
||||
'pg_trigger', 'pg_type', 'pg_attrdef', 'pg_event_trigger', 'pg_foreign_server', 'pg_foreign_data_wrapper',
|
||||
'pg_collation', 'pg_ts_config', 'pg_ts_dict', 'pg_ts_parser', 'pg_ts_template', 'pg_extension',
|
||||
'pg_synonym', 'pg_policy'))
|
||||
'pg_collation', 'pg_ts_config', 'pg_ts_dict', 'pg_ts_parser', 'pg_ts_template', 'pg_extension', 'pg_policy', 'pg_subscription', 'pg_publication_rel'))
|
||||
ORDER BY classid, cl.relkind
|
||||
|
@ -20,13 +20,14 @@ SELECT DISTINCT dep.deptype, dep.classid, cl.relkind, ad.adbin, pg_get_expr(ad.a
|
||||
WHEN ext.oid IS NOT NULL THEN 'Ex'::text
|
||||
WHEN syn.oid IS NOT NULL THEN 'Sy'::text
|
||||
WHEN pl.oid IS NOT NULL THEN 'Rs'::text
|
||||
WHEN pub_rel.oid IS NOT NULL THEN 'r'::text
|
||||
ELSE ''
|
||||
END AS type,
|
||||
COALESCE(coc.relname, clrw.relname) AS ownertable,
|
||||
CASE WHEN cl.relname IS NOT NULL AND att.attname IS NOT NULL THEN cl.relname || COALESCE('.' || att.attname, '')
|
||||
ELSE COALESCE(cl.relname, co.conname, pr.proname, tg.tgname, ty.typname, la.lanname, rw.rulename, ns.nspname,
|
||||
fs.srvname, fdw.fdwname, evt.evtname, col.collname, ftsc.cfgname, ftsd.dictname, ftsp.prsname,
|
||||
ftst.tmplname, ext.extname, syn.synname, pl.polname)
|
||||
ftst.tmplname, ext.extname, syn.synname, pl.polname, quote_ident(pubns.nspname)||'.'||quote_ident(pubcl.relname))
|
||||
END AS refname,
|
||||
COALESCE(nsc.nspname, nso.nspname, nsp.nspname, nst.nspname, nsrw.nspname, colns.nspname, ftscns.nspname,
|
||||
ftsdns.nspname, ftspns.nspname, ftstns.nspname, synns.nspname) AS nspname,
|
||||
@ -70,10 +71,13 @@ LEFT JOIN pg_extension ext ON ext.oid=dep.objid
|
||||
LEFT JOIN pg_synonym syn ON syn.oid=dep.objid
|
||||
LEFT JOIN pg_namespace synns ON syn.synnamespace=synns.oid
|
||||
LEFT JOIN pg_policy pl ON pl.oid=dep.objid
|
||||
LEFT JOIN pg_publication_rel pub_rel ON pub_rel.oid = dep.objid
|
||||
LEFT JOIN pg_class pubcl ON pubcl.oid = pub_rel.prrelid
|
||||
LEFT JOIN pg_namespace pubns ON pubns.oid=pubcl.relnamespace
|
||||
{{where_clause}} AND
|
||||
classid IN ( SELECT oid FROM pg_class WHERE relname IN
|
||||
('pg_class', 'pg_constraint', 'pg_conversion', 'pg_language', 'pg_proc', 'pg_rewrite', 'pg_namespace',
|
||||
'pg_trigger', 'pg_type', 'pg_attrdef', 'pg_event_trigger', 'pg_foreign_server', 'pg_foreign_data_wrapper',
|
||||
'pg_collation', 'pg_ts_config', 'pg_ts_dict', 'pg_ts_parser', 'pg_ts_template', 'pg_extension',
|
||||
'pg_synonym', 'pg_policy'))
|
||||
'pg_synonym', 'pg_policy', 'pg_subscription', 'pg_publication_rel'))
|
||||
ORDER BY classid, cl.relkind
|
||||
|
@ -2,7 +2,7 @@
|
||||
//
|
||||
// pgAdmin 4 - PostgreSQL Tools
|
||||
//
|
||||
// Copyright (C) 2013 - 2020, The pgAdmin Development Team
|
||||
// Copyright (C) 2013 - 2021, The pgAdmin Development Team
|
||||
// This software is released under the PostgreSQL Licence
|
||||
//
|
||||
//////////////////////////////////////////////////////////////
|
||||
|
@ -638,7 +638,9 @@ class PGChildNodeView(NodeView):
|
||||
# if type is present in the types dictionary, but it's
|
||||
# value is None then it requires special handling.
|
||||
if type_str[0] == 'r':
|
||||
if (type_str[1].isdigit() and int(type_str[1]) > 0) or \
|
||||
if len(type_str) == 1:
|
||||
type_name = 'table'
|
||||
elif (type_str[1].isdigit() and int(type_str[1]) > 0) or \
|
||||
(len(type_str) > 2 and type_str[2].isdigit() and
|
||||
int(type_str[2]) > 0):
|
||||
type_name = 'column'
|
||||
|
@ -19,7 +19,8 @@ export class ModelValidation {
|
||||
}
|
||||
|
||||
validate() {
|
||||
const serviceId = this.model.get('service');
|
||||
const serviceId = this.model.get('service'),
|
||||
pub = this.model.get('pub');
|
||||
|
||||
if (!this.model.isNew() && 'id' in this.model.sessAttrs) {
|
||||
this.err['id'] = gettext('The ID cannot be changed.');
|
||||
@ -38,8 +39,14 @@ export class ModelValidation {
|
||||
this.checkForEmpty('db', gettext('Maintenance database must be specified.'));
|
||||
this.checkForEmpty('username', gettext('Username must be specified.'));
|
||||
this.checkForEmpty('port', gettext('Port must be specified.'));
|
||||
if(!_.isUndefined(pub) && pub.length == 0){
|
||||
this.checkForEmpty('pub', gettext('Publication must be specified.'));
|
||||
}
|
||||
} else {
|
||||
this.checkForEmpty('db', gettext('Maintenance database must be specified.'));
|
||||
if(!_.isUndefined(pub) && pub.length == 0){
|
||||
this.checkForEmpty('pub', gettext('Publication must be specified.'));
|
||||
}
|
||||
this.clearHostAddressAndDbErrors();
|
||||
}
|
||||
|
||||
@ -80,8 +87,17 @@ export class ModelValidation {
|
||||
}
|
||||
|
||||
checkHostAndHostAddress() {
|
||||
const translatedStr = gettext('Either Host name, Address or Service must ' +
|
||||
|
||||
let pub = this.model.get('pub'),
|
||||
errmsg;
|
||||
if(!_.isUndefined(pub) && pub.length == 0){
|
||||
errmsg = gettext('Either Host name or Address must ' +
|
||||
'be specified.');
|
||||
}else{
|
||||
errmsg = gettext('Either Host name, Address or Service must ' +
|
||||
'be specified.');
|
||||
}
|
||||
const translatedStr = errmsg;
|
||||
if (this.checkForEmpty('host', translatedStr) &&
|
||||
this.checkForEmpty('hostaddr', translatedStr)) {
|
||||
this.errmsg = this.errmsg || translatedStr;
|
||||
|
@ -320,6 +320,24 @@ FROM (
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['publication'] %}
|
||||
SELECT 'publication'::text AS obj_type, pubname AS obj_name, ':publication.'||pub.oid||':/' || pubname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['publication'] }} AS show_node, NULL AS other_info
|
||||
FROM pg_publication pub
|
||||
{% endif %}
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['subscription'] %}
|
||||
SELECT 'subscription'::text AS obj_type, subname AS obj_name, ':subscription.'||pub.oid||':/' || subname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['subscription'] }} AS show_node, NULL AS other_info
|
||||
FROM pg_subscription pub
|
||||
{% endif %}
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
{% if all_obj or obj_type in ['language'] %}
|
||||
SELECT 'language'::text AS obj_type, lanname AS obj_name, ':language.'||lan.oid||':/' || lanname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['language'] }} AS show_node, NULL AS other_info
|
||||
|
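The two new UNION branches simply surface publication and subscription names, with their synthetic browser paths, in the search-objects results. A standalone approximation of what those branches return, runnable against any PostgreSQL 10+ database (psycopg2 and a connection DSN are assumed; this is not the template itself):

# Approximation of the added search-object branches for publications/subscriptions.
import psycopg2


def search_replication_objects(dsn):
    with psycopg2.connect(dsn) as conn, conn.cursor() as cur:
        cur.execute("""
            SELECT 'publication' AS obj_type, pubname AS obj_name,
                   ':publication.' || oid || ':/' || pubname AS obj_path
              FROM pg_publication
            UNION ALL
            SELECT 'subscription', subname,
                   ':subscription.' || oid || ':/' || subname
              FROM pg_subscription
        """)
        return cur.fetchall()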
@ -337,6 +337,24 @@ FROM (
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['publication'] %}
|
||||
SELECT 'publication'::text AS obj_type, pubname AS obj_name, ':publication.'||pub.oid||':/' || pubname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['publication'] }} AS show_node, NULL AS other_info
|
||||
FROM pg_publication pub
|
||||
{% endif %}
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['subscription'] %}
|
||||
SELECT 'subscription'::text AS obj_type, subname AS obj_name, ':subscription.'||pub.oid||':/' || subname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['subscription'] }} AS show_node, NULL AS other_info
|
||||
FROM pg_subscription pub
|
||||
{% endif %}
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
{% if all_obj or obj_type in ['language'] %}
|
||||
SELECT 'language'::text AS obj_type, lanname AS obj_name, ':language.'||lan.oid||':/' || lanname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['language'] }} AS show_node, NULL AS other_info
|
||||
|
@ -361,6 +361,25 @@ FROM (
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['publication'] %}
|
||||
SELECT 'publication'::text AS obj_type, pubname AS obj_name, ':publication.'||pub.oid||':/' || pubname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['publication'] }} AS show_node, NULL AS other_info
|
||||
FROM pg_publication pub
|
||||
{% endif %}
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['subscription'] %}
|
||||
SELECT 'subscription'::text AS obj_type, subname AS obj_name, ':subscription.'||pub.oid||':/' || subname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['subscription'] }} AS show_node, NULL AS other_info
|
||||
FROM pg_subscription pub
|
||||
{% endif %}
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['language'] %}
|
||||
SELECT 'language'::text AS obj_type, lanname AS obj_name, ':language.'||lan.oid||':/' || lanname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['language'] }} AS show_node, NULL AS other_info
|
||||
|
@ -368,6 +368,25 @@ FROM (
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['publication'] %}
|
||||
SELECT 'publication'::text AS obj_type, pubname AS obj_name, ':publication.'||pub.oid||':/' || pubname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['publication'] }} AS show_node, NULL AS other_info
|
||||
FROM pg_publication pub
|
||||
{% endif %}
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['subscription'] %}
|
||||
SELECT 'subscription'::text AS obj_type, subname AS obj_name, ':subscription.'||pub.oid||':/' || subname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['subscription'] }} AS show_node, NULL AS other_info
|
||||
FROM pg_subscription pub
|
||||
{% endif %}
|
||||
{% if all_obj %}
|
||||
UNION
|
||||
{% endif %}
|
||||
|
||||
{% if all_obj or obj_type in ['language'] %}
|
||||
SELECT 'language'::text AS obj_type, lanname AS obj_name, ':language.'||lan.oid||':/' || lanname AS obj_path, ''::text AS schema_name,
|
||||
{{ show_node_prefs['language'] }} AS show_node, NULL AS other_info
|
||||
|
@ -43,7 +43,8 @@ class SearchObjectsHelper:
|
||||
'trigger_function', 'fts_template', 'collation', 'view', 'mview',
|
||||
'fts_configuration', 'extension', 'language',
|
||||
'event_trigger', 'foreign_server', 'user_mapping',
|
||||
'foreign_data_wrapper', 'row_security_policy'
|
||||
'foreign_data_wrapper', 'row_security_policy',
|
||||
'publication', 'subscription'
|
||||
] if node_types is None else node_types
|
||||
|
||||
@property
|
||||
|
@ -433,6 +433,8 @@ module.exports = [{
|
||||
',pgadmin.node.database' +
|
||||
',pgadmin.node.role' +
|
||||
',pgadmin.node.cast' +
|
||||
',pgadmin.node.publication' +
|
||||
',pgadmin.node.subscription' +
|
||||
',pgadmin.node.tablespace' +
|
||||
',pgadmin.node.resource_group' +
|
||||
',pgadmin.node.event_trigger' +
|
||||
|
@ -207,6 +207,8 @@ var webpackShimConfig = {
|
||||
'pgadmin.help': path.join(__dirname, './pgadmin/help/static/js/help'),
|
||||
'pgadmin.misc.explain': path.join(__dirname, './pgadmin/misc/static/explain/js/explain'),
|
||||
'pgadmin.node.cast': path.join(__dirname, './pgadmin/browser/server_groups/servers/databases/casts/static/js/cast'),
|
||||
'pgadmin.node.publication': path.join(__dirname, './pgadmin/browser/server_groups/servers/databases/publications/static/js/publication'),
|
||||
'pgadmin.node.subscription': path.join(__dirname, './pgadmin/browser/server_groups/servers/databases/subscriptions/static/js/subscription'),
|
||||
'pgadmin.node.catalog': path.join(__dirname, './pgadmin/browser/server_groups/servers/databases/schemas/static/js/catalog'),
|
||||
'pgadmin.node.catalog_object': path.join(__dirname, './pgadmin/browser/server_groups/servers/databases/schemas/catalog_objects/static/js/catalog_object'),
|
||||
'pgadmin.node.catalog_object_column': path.join(__dirname, './pgadmin/browser/server_groups/servers/databases/schemas/catalog_objects/columns/static/js/catalog_object_column'),
|
||||
|