Implement new PostgreSQL 15 features in publication dialog and SQL. #5868

This commit is contained in:
Anil Sahoo 2023-05-25 16:25:12 +05:30 committed by GitHub
parent 702bc8c8ce
commit 8b7eeca545
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
91 changed files with 3105 additions and 202 deletions

Binary file not shown.

After

Width:  |  Height:  |  Size: 23 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 38 KiB

View File

@ -6,9 +6,9 @@
`Logical replication <https://www.postgresql.org/docs/13/logical-replication.html>`_ uses a *publish* and *subscribe* model with one or more *subscribers* subscribing to one or more *publications* on a publisher node.
Use the *publication* dialog to create a publication. A publication is a set of changes generated from a table or a group of tables, and might also be described as a change set or replication set.
Use the *publication* dialog to create a publication. A publication is a set of changes generated from a table or a group of tables, or from a schema or a group of schemas, and might also be described as a change set or replication set.
The *publication* dialog organizes the development of a publication through the following dialog tabs: *General* and *Definition*. The *SQL* tab displays the SQL code generated by dialog selections.
The *publication* dialog organizes the development of a publication through the following dialog tabs: *General* and *Definition*. The *SQL* tab displays the SQL code generated by dialog selections. For PostgreSQL 15 and later, the *Tables* and *Options* tabs replace the *Definition* tab.
.. image:: images/publication_general.png
:alt: Publication dialog general tab
@ -32,6 +32,32 @@ Use the *Definition* tab to set properties for the publication:
* Specify a table or list of tables separated by a comma in the *Tables* field to replicate all the listed tables.
* Use the *With* section to determine which DML operations will be published by the new publication to the subscribers. Move the switch next to *INSERT*, *UPDATE*, *DELETE*, or *TRUNCATE* to *No* if you do not want to replicate any of these DML operations from Publication to Subscription. By default, all the switches are set to *Yes* allowing all the DML operations.
From PostgreSQL 15 onwards, the *Tables* and *Options* tabs are visible.
Click the *Tables* tab to continue.
.. image:: images/publication_tables.png
:alt: Publication dialog tables tab
:align: center
Use the *Tables* tab to set table properties for the publication:
* Move the switch next to *All tables?* to *Yes* to replicate all the tables of the database, including tables created in the future.
* Move the switch next to *Only table?* to *Yes* to replicate only the listed tables excluding all its descendant tables.
* Specify a schema or list of schemas separated by a comma in the *Tables In Schema* field to replicate all the listed schemas. This field will be disabled if any columns are selected for tables in the subsequent field.
* Specify a table or list of tables to replicate all the listed tables, along with specific columns and/or a WHERE clause to filter rows.
Click the *Options* tab to continue.
.. image:: images/publication_options.png
:alt: Publication dialog options tab
:align: center
Use the *Options* tab to set option properties for the publication:
* Use the *With* section to determine which DML operations will be published by the new publication to the subscribers. Move the switch next to *INSERT*, *UPDATE*, *DELETE*, or *TRUNCATE* to *No* if you do not want to replicate any of these DML operations from Publication to Subscription. By default, all the switches are set to *Yes* allowing all the DML operations.
.. note:: A published table must have a “replica identity” configured in order to be able to replicate UPDATE and DELETE operations. You can change it with the ALTER TABLE statement. For more information on replica identity see `Logical Replication Publication <https://www.postgresql.org/docs/13/logical-replication-publication.html>`_.
Click the *SQL* tab to continue.
@ -44,7 +70,7 @@ The following is an example of the sql command generated by user selections in
the *Publication* dialog:
.. image:: images/publication_sql.png
:alt: Publication dialog sql tab
:alt: Publication dialog sql tab for PostgreSQL version <= 14
:align: center
The example creates a publication named *pub1* that is owned by *postgres*. It

View File

@ -12,7 +12,7 @@ import json
from functools import wraps
from pgadmin.browser.server_groups.servers import databases
from flask import render_template, request, jsonify
from flask import render_template, request, jsonify, current_app as app
from flask_babel import gettext
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.browser.utils import PGChildNodeView
@ -21,8 +21,6 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.tools.schema_diff.compare import SchemaDiffObjectCompare
from pgadmin.tools.schema_diff.node_registry import SchemaDiffRegistry
from urllib.parse import unquote
class PublicationModule(CollectionNodeModule):
@ -192,7 +190,9 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
'stats': [{'get': 'statistics'}],
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'get_schemas': [{}, {'get': 'get_schemas'}],
'get_tables': [{}, {'get': 'get_tables'}],
'get_all_columns': [{}, {'get': 'get_all_columns'}],
'delete': [{'delete': 'delete'}, {'delete': 'delete'}]
})
@ -253,22 +253,16 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
return internal_server_error(errormsg=res)
for rows in res['rows']:
if not rows['all_table']:
get_name_sql = render_template(
"/".join([self.template_path, self._DELETE_SQL]),
pbid=rows['oid'], conn=self.conn
)
status, pname = self.conn.execute_scalar(get_name_sql)
table_sql = render_template(
"/".join([self.template_path,
self._GET_TABLE_FOR_PUBLICATION]),
pname=pname
pbid=rows['oid']
)
pub_table = []
status, table_res = self.conn.execute_dict(table_sql)
for table in table_res['rows']:
pub_table.append(table['pubtable'])
pub_table = \
[table['table_name'] for table in table_res['rows']]
pub_table = ", ".join(str(elem) for elem in pub_table)
@ -385,24 +379,42 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
return False, gone(self._NOT_FOUND_PUB_INFORMATION)
if not res['rows'][0]['all_table']:
get_name_sql = render_template(
"/".join([self.template_path, self._DELETE_SQL]),
pbid=pbid, conn=self.conn
)
status, pname = self.conn.execute_scalar(get_name_sql)
if self.manager.version >= 150000:
schema_name_sql = render_template(
"/".join([self.template_path, 'get_pub_schemas.sql']),
pbid=pbid
)
status, snames_list_res = self.conn.execute_dict(
schema_name_sql)
if len(snames_list_res['rows']) != 0:
res['rows'][0]['pubschema'] = \
[sname_dict['sname'] for sname_dict
in snames_list_res['rows']]
table_sql = render_template(
"/".join([self.template_path,
self._GET_TABLE_FOR_PUBLICATION]),
pname=pname
pbid=pbid
)
pub_table = []
pub_table_names_list = []
status, table_res = self.conn.execute_dict(table_sql)
for table in table_res['rows']:
pub_table.append(table['pubtable'])
pub_table_names_list.append(table['table_name'])
if 'columns' in table and 'where' in table:
pub_table.append({
'table_name': table['table_name'],
'columns': table['columns'],
'where': table['where'],
})
else:
pub_table.append(table['table_name'])
res['rows'][0]['pubtable'] = pub_table
res['rows'][0]['pubtable_names'] = pub_table_names_list
return True, res['rows'][0]
@ -630,21 +642,74 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
"""
drop_table_data = []
add_table_data = []
update_table_data = []
drop_table = False
add_table = False
update_table = False
for table in old_data['pubtable']:
if 'pubtable' in data and table not in data['pubtable']:
drop_table_data.append(table)
drop_table = True
if self.manager.version < 150000:
for table in old_data['pubtable']:
if 'pubtable' in data and table not in data['pubtable']:
drop_table_data.append(table)
drop_table = True
if 'pubtable' in data:
for table in data['pubtable']:
if table not in old_data['pubtable']:
if 'pubtable' in data:
for table in data['pubtable']:
if table not in old_data['pubtable']:
add_table_data.append(table)
add_table = True
elif self.manager.version >= 150000:
if 'pubtable' in data and 'deleted' in data['pubtable']:
for table in data['pubtable']['deleted']:
drop_table_data.append(table)
drop_table = True
if 'pubtable' in data and 'changed' in data['pubtable']:
update_table_data = [*old_data['pubtable']]
for index, table1 in enumerate(old_data['pubtable']):
for table2 in data['pubtable']['changed']:
if table1['table_name'] == table2['table_name']:
update_table_data[index] = table2
update_table = True
break
if 'pubtable' in data and 'added' in data['pubtable']:
for table in data['pubtable']['added']:
add_table_data.append(table)
add_table = True
return drop_table, add_table, drop_table_data, add_table_data
return drop_table, add_table, update_table, drop_table_data, \
add_table_data, update_table_data
def _get_schema_details_to_add_and_delete(self, old_data, data):
"""
Work out which published schemas must be added to, or dropped from,
the publication when moving from *old_data* to *data*.

:param old_data: current publication properties; may contain
    'pubschema', the list of schemas currently in the publication.
:param data: requested publication properties from the client.
:return: tuple (drop_schema, add_schema, drop_schema_data,
    add_schema_data) — flags telling whether a DROP / ADD
    "TABLES IN SCHEMA" statement is needed, plus the schema-name
    lists for each statement.
"""
drop_schema_data = []
add_schema_data = []
drop_schema = False
add_schema = False
# Schemas published before but absent from the request must be dropped.
if 'pubschema' in old_data:
for schema in old_data['pubschema']:
if 'pubschema' in data and schema not in data['pubschema']:
drop_schema_data.append(schema)
drop_schema = True
# Requested schemas not already published must be added; when the old
# data had no 'pubschema' key at all, every requested schema is new.
if 'pubschema' in data:
for schema in data['pubschema']:
if 'pubschema' in old_data and \
schema not in old_data['pubschema']:
add_schema_data.append(schema)
add_schema = True
elif 'pubschema' not in old_data:
add_schema_data.append(schema)
add_schema = True
return drop_schema, add_schema, drop_schema_data, add_schema_data
def get_sql(self, data, pbid=None):
"""
@ -668,11 +733,33 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
if len(res['rows']) == 0:
return gone(self._NOT_FOUND_PUB_INFORMATION)
old_data = self._get_old_table_data(res['rows'][0]['name'], res)
snames_list = []
drop_table, add_table, drop_table_data, add_table_data = \
if self.manager.version >= 150000:
schema_name_sql = render_template(
"/".join([self.template_path, 'get_pub_schemas.sql']),
pbid=pbid
)
status, snames_list_res = self.conn.execute_dict(
schema_name_sql)
if len(snames_list_res['rows']) != 0:
snames_list = [sname_dict['sname'] for sname_dict
in snames_list_res['rows']]
old_data = self._get_old_table_data(res['rows'][0]['name'], res,
snames_list)
if len(snames_list) != 0:
old_data['pubschema'] = snames_list
drop_table, add_table, update_table, drop_table_data, \
add_table_data, update_table_data = \
self._get_table_details_to_add_and_delete(old_data, data)
drop_schema, add_schema, drop_schema_data, add_schema_data = \
self._get_schema_details_to_add_and_delete(old_data, data)
for arg in required_args:
if arg not in data:
data[arg] = old_data[arg]
@ -684,7 +771,10 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
"/".join([self.template_path, self._UPDATE_SQL]),
data=data, o_data=old_data, conn=self.conn,
drop_table=drop_table, drop_table_data=drop_table_data,
add_table=add_table, add_table_data=add_table_data
add_table=add_table, add_table_data=add_table_data,
update_table=update_table, update_table_data=update_table_data,
drop_schema=drop_schema, drop_schema_data=drop_schema_data,
add_schema=add_schema, add_schema_data=add_schema_data,
)
return sql.strip('\n'), data['name'] if 'name' in data \
else old_data['name']
@ -695,6 +785,41 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
data=data, conn=self.conn)
return sql.strip('\n'), data['name']
@check_precondition
def get_schemas(self, gid, sid, did):
"""
Return the list of schemas (as select-control options) for the
specified server group id, server id and database id.

Args:
gid: Server Group ID
sid: Server ID
did: Database ID

Returns:
JSON response whose ``data`` is a list of
``{'label': <schema>, 'value': <schema>}`` dicts, or an
internal-server-error response if the catalog query fails.
"""
res = []
sql = render_template("/".join([self.template_path,
'get_all_schemas.sql']),
show_sys_objects=self.blueprint.
show_system_objects,
server_type=self.manager.server_type
)
status, rset = self.conn.execute_2darray(sql)
if not status:
return internal_server_error(errormsg=rset)
# Each schema becomes a label/value pair for the UI select control.
for row in rset['rows']:
res.append(
{
'label': row['nspname'],
'value': row['nspname'],
}
)
return make_json_response(
data=res,
status=200
)
@check_precondition
def get_tables(self, gid, sid, did):
"""
@ -715,12 +840,14 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
)
status, rset = self.conn.execute_2darray(sql)
if not status:
return internal_server_error(errormsg=res)
return internal_server_error(errormsg=rset)
for row in rset['rows']:
res.append(
{
'label': row['table'],
'value': row['table']
'value': row['table'],
'tid': row['tid']
}
)
return make_json_response(
@ -728,24 +855,64 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
status=200
)
def _get_old_table_data(self, pname, res):
@check_precondition
def get_all_columns(self, gid, sid, did):
"""
Return the column list (as select-control options) for the table
whose OID is supplied as the ``tid`` query-string argument.

Args:
gid: Server Group ID
sid: Server ID
did: Database ID

Note: the table OID is read from ``request.args['tid']`` rather
than from the URL path.
"""
data = request.args
res = []
# NOTE(review): data['tid'] is interpolated into the SQL template;
# presumably always an integer OID supplied by the UI — confirm the
# template validates/quotes it before rendering.
sql = render_template("/".join([self.template_path,
'get_all_columns.sql']),
tid=data['tid'], conn=self.conn)
status, rset = self.conn.execute_2darray(sql)
if not status:
return internal_server_error(errormsg=rset)
# Each column becomes a label/value pair for the UI select control.
for row in rset['rows']:
res.append(
{
'label': row['column'],
'value': row['column'],
}
)
return make_json_response(
data=res,
status=200
)
def _get_old_table_data(self, pname, res, exempt_schema_list=[]):
"""
This function return table details before update
:param pname:
:param res:
:return:old_data
"""
table_sql = render_template(
"/".join([self.template_path, self._GET_TABLE_FOR_PUBLICATION]),
pname=pname
"/".join([self.template_path,
self._GET_TABLE_FOR_PUBLICATION]),
pbid=res['rows'][0]['oid']
)
pub_table = []
status, table_res = self.conn.execute_dict(table_sql)
for table in table_res['rows']:
pub_table.append(table['pubtable'])
if 'columns' in table and 'where' in table:
pub_table.append({
'table_name': table['table_name'],
'columns': table['columns'],
'where': table['where'],
})
else:
pub_table.append(table['table_name'])
res['rows'][0]['pubtable'] = pub_table
@ -781,6 +948,20 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
if len(res['rows']) == 0:
return gone(self._NOT_FOUND_PUB_INFORMATION)
snames_list = []
if self.manager.version >= 150000:
schema_name_sql = render_template(
"/".join([self.template_path, 'get_pub_schemas.sql']),
pbid=pbid
)
status, snames_list_res = self.conn.execute_dict(
schema_name_sql)
if len(snames_list_res['rows']) != 0:
snames_list = [sname_dict['sname'] for sname_dict
in snames_list_res['rows']]
get_name_sql = render_template(
"/".join([self.template_path, self._DELETE_SQL]),
pbid=pbid, conn=self.conn
@ -788,7 +969,10 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
status, pname = self.conn.execute_scalar(get_name_sql)
# Get old table details
old_data = self._get_old_table_data(pname, res)
old_data = self._get_old_table_data(pname, res, snames_list)
if len(snames_list) != 0:
old_data['pubschema'] = snames_list
sql = render_template("/".join([self.template_path,
self._CREATE_SQL]),
@ -940,5 +1124,5 @@ class PublicationView(PGChildNodeView, SchemaDiffObjectCompare):
return sql
SchemaDiffRegistry(blueprint.node_type, PublicationView, 'Database')
# SchemaDiffRegistry(blueprint.node_type, PublicationView, 'Database')
PublicationView.register_node_view(blueprint)

View File

@ -7,7 +7,7 @@
//
//////////////////////////////////////////////////////////////
import { getNodeAjaxOptions, getNodeListByName } from '../../../../../../static/js/node_ajax';
import { getNodeAjaxOptions,getNodeListByName } from '../../../../../../static/js/node_ajax';
import PublicationSchema from './publication.ui';
define('pgadmin.node.publication', [
@ -23,7 +23,6 @@ define('pgadmin.node.publication', [
label: gettext('Publications'),
type: 'coll-publication',
columns: ['name', 'pubowner', 'pubtable', 'all_table'],
});
}
@ -40,6 +39,7 @@ define('pgadmin.node.publication', [
canDrop: true,
canDropCascade: true,
hasDepends: true,
width: pgBrowser.stdW.md + 'px',
Init: function() {
@ -69,12 +69,13 @@ define('pgadmin.node.publication', [
data: {action: 'create'},
}]);
},
getSchema: function(treeNodeInfo, itemNodeData){
return new PublicationSchema(
{
publicationTable: ()=>getNodeAjaxOptions('get_tables', this, treeNodeInfo, itemNodeData),
allTables: ()=>getNodeAjaxOptions('get_tables', this, treeNodeInfo, itemNodeData),
allSchemas:()=>getNodeAjaxOptions('get_schemas', this, treeNodeInfo, itemNodeData),
getColumns: (params)=>getNodeAjaxOptions('get_all_columns', this, treeNodeInfo, itemNodeData,{urlParams: params, useCache:false}),
role:()=>getNodeListByName('role', treeNodeInfo, itemNodeData),
},{
node_info: treeNodeInfo.server,

View File

@ -10,7 +10,6 @@ import gettext from 'sources/gettext';
import BaseUISchema from 'sources/SchemaView/base_schema.ui';
import _ from 'lodash';
export class DefaultWithSchema extends BaseUISchema {
constructor(node_info) {
super();
@ -33,21 +32,126 @@ export class DefaultWithSchema extends BaseUISchema {
},{
id: 'evnt_truncate', label: gettext('TRUNCATE'),
type: 'switch', group: gettext('With'),
visible: function() {
return !_.isUndefined(this.node_info['node_info'])
&& !_.isUndefined(this.node_info['node_info'].version)
&& this.node_info['node_info'].version >= 110000;
},
min_version: 110000,
},{
id: 'publish_via_partition_root', label: gettext('Publish via root?'),
type: 'switch', group: gettext('With'),
visible: function() {
return !_.isUndefined(this.node_info['node_info'])
&& !_.isUndefined(this.node_info['node_info'].version)
&& this.node_info['node_info'].version >= 130000;
min_version: 130000,
},
];
}
}
export class PublicationTableSchema extends BaseUISchema {
constructor(allTables,getColumns) {
super({
table_name: undefined,
where: undefined,
columns:undefined,
});
// AJAX providers: table options for the publication, and the column
// options for a given table (keyed by table oid).
this.allTables = allTables;
this.getColumns=getColumns;
// Filled once the table select has loaded its options (label + tid).
this.allTablesOptions = [];
this.varTypes = {};
// When true, existing rows are rendered read-only.
this.allReadOnly = false;
}
isConnected(state) {
return Boolean(state.connected);
}
getPlaceHolderMsg(variable) {
let msg = '';
if (variable?.min_server_version && variable?.max_server_version) {
msg = gettext('%s <= Supported version >= %s', variable?.max_server_version, variable?.min_server_version);
} else if (variable?.min_server_version) {
msg = gettext('Supported version >= %s', variable?.min_server_version);
} else if (variable?.max_server_version) {
msg = gettext('Supported version <= %s', variable?.max_server_version);
}
return msg;
}
getTableOid(tabName) {
// Here we will fetch the table oid from table name
// iterate over list to find table oid
for(const t of this.allTablesOptions) {
if(t.label === tabName) {
return t.tid;
}
}
}
isTableName(state){
return Boolean(state.table_name);
}
get baseFields() {
let obj = this;
return [
{
id: 'table_name',
label: gettext('Table Name'),
type: 'select',
noEmpty: true,
disabled:function (state) {
return !obj.isNew(state);
},
editable: function (state) {
return obj.isNew(state) || !obj.allReadOnly;
},
cell: () => ({
cell: 'select',
options: this.allTables,
optionsLoaded: (options) => {
obj.allTablesOptions=options;
},
controlProps: { allowClear: false },
}),
},
}];
{
id: 'columns',
label: gettext('Columns'),
type: 'select',
deps:['table_name'],
disabled: (state) => !obj.isTableName(state),
depChange: (state) => {
if(!state.table_name) {
return {
columns: null,
};
}
},
editable: function (state) {
return obj.isNew(state) || !obj.allReadOnly;
},
cell: (state) => {
let tid = obj.getTableOid(state.table_name);
return{
cell: 'select',
options: (state.table_name && tid) ? ()=>obj.getColumns({tid: tid}) : [],
optionsReloadBasis: tid,
controlProps: { allowClear: true, multiple: true},
};
},
},
{
id: 'where',
label: gettext('Where'),
type: 'sql',
deps: ['table_name'],
disabled: (state) => !obj.isTableName(state),
editable: function (state) {
return obj.isNew(state) || !obj.allReadOnly;
},
cell: () => ({
cell: 'sql',
controlProps: {
lineWrapping: true,
}
}),
},
];
}
}
@ -55,9 +159,11 @@ export default class PublicationSchema extends BaseUISchema {
constructor(fieldOptions={}, node_info={}, initValues={}) {
super({
name: undefined,
pubowner: (node_info) ? node_info['node_info'].user.name: undefined,
pubtable: undefined,
all_table: undefined,
pubowner: (node_info) ? node_info['node_info']?.user.name: undefined,
pubtable: [],
pubtable_names: [],
pubschema: undefined,
all_table: false,
evnt_insert:true,
evnt_delete:true,
evnt_update:true,
@ -69,11 +175,16 @@ export default class PublicationSchema extends BaseUISchema {
this.fieldOptions = {
role: [],
publicationTable: [],
allTables: [],
allSchemas:[],
...fieldOptions,
};
this.node_info = node_info;
this.paramSchema = new PublicationTableSchema(this.fieldOptions.allTables, this.fieldOptions.getColumns);
this.version=!_.isUndefined(this.node_info['node_info']) && !_.isUndefined(this.node_info['node_info'].version) && this.node_info['node_info'].version;
}
get idAttribute() {
return 'oid';
}
@ -81,7 +192,9 @@ export default class PublicationSchema extends BaseUISchema {
isAllTable(state) {
let allTable = state.all_table;
if(allTable){
state.pubtable = '';
state.pubtable = [];
state.pubtable_names = '';
state.pubschema = undefined;
return true;
}
return false;
@ -101,16 +214,40 @@ export default class PublicationSchema extends BaseUISchema {
return true;
}
isColumn(state){
// True when any selected publication table carries an explicit column
// list. NOTE(review): this predicate also clears state.pubschema as a
// side effect — presumably because a column filter cannot be combined
// with TABLES IN SCHEMA; confirm against the dialog's intent.
let table=state.pubtable, columnsList=[];
if(!_.isUndefined(table) && table.length > 0){
table?.forEach(i=>{
if(i.columns!=undefined && i.columns.length!==0){
columnsList.push(i.columns);
}
});
if(columnsList?.length > 0){
state.pubschema=undefined;
return true;
}
return false;
}
// Falls through (undefined → falsy) when no tables are selected.
}
isConnected(state) {
return Boolean(state.connected);
}
getVersion(){
return (
!_.isUndefined(this.node_info['node_info']) &&
!_.isUndefined(this.node_info['node_info'].version) &&
this.node_info['node_info'].version
);
}
get baseFields() {
let obj = this;
return [{
id: 'name', label: gettext('Name'), type: 'text',
mode: ['properties', 'create', 'edit'], noEmpty: true,
visible: function() {
return !_.isUndefined(this.node_info['node_info'])
&& !_.isUndefined(this.node_info['node_info'].version)
&& this.node_info['node_info'].version >= 100000;
},
min_version: 100000,
},{
id: 'oid', label: gettext('OID'), cell: 'string', mode: ['properties'],
type: 'text',
@ -123,26 +260,56 @@ export default class PublicationSchema extends BaseUISchema {
mode: ['edit', 'properties', 'create'], controlProps: { allowClear: false},
},{
id: 'all_table', label: gettext('All tables?'), type: 'switch',
group: gettext('Definition'), mode: ['edit', 'properties', 'create'], deps: ['name'],
group: this.version < 150000 ? gettext('Definition') : gettext('Tables'), mode: ['edit', 'properties', 'create'], deps: ['name'],
readonly: (state) => {return !obj.isNew(state);},
},{
id: 'only_table', label: gettext('Only table?'), type: 'switch',
group: gettext('Definition'), mode: ['edit', 'create'],
deps: ['name', 'pubtable', 'all_table'], readonly: obj.isTable,
group: this.version < 150000 ? gettext('Definition') : gettext('Tables'), mode: ['edit', 'create'],
deps: ['name', 'pubtable', 'all_table'], readonly: (state) => {
if(obj.isNew(state))
return obj.isTable(state);
else
return true;
},
helpMessageMode: ['edit', 'create'],
helpMessage: gettext('If ONLY is specified before the table name, only that table is added to the publication. If ONLY is not specified, the table and all its descendant tables (if any) are added.'),
},{
id: 'pubtable', label: gettext('Tables'), type: 'select',
id: 'pubschema', label: gettext('Tables in Schema'), type: 'select',
controlProps: { allowClear: true, multiple: true, creatable: true },
options: this.fieldOptions.publicationTable,
group: gettext('Definition'), mode: ['edit', 'create', 'properties'],
options: this.fieldOptions.allSchemas, deps: ['all_table','pubtable'],
disabled: (state)=>{return obj.isColumn(state) || obj.isAllTable(state);},
group: this.version < 150000 ? null : gettext('Tables'), mode: ['edit', 'create', 'properties'],
min_version: 150000,
},
{
id: 'pubtable_names', label: gettext('Tables'), cell: 'string',
type: (state)=>{
let table= (!_.isUndefined(state?.pubtable_names) && state?.pubtable_names.length > 0) && state?.pubtable_names;
return {
type: 'select',
options: table,
controlProps: { allowClear: true, multiple: true, creatable: true },
};
},
group: this.version < 150000? gettext('Definition') : gettext('Tables'), mode: ['properties'],
deps: ['all_table'], disabled: obj.isAllTable,
},{
},
{
id: 'pubtable', label: this.version < 150000 ? gettext('Tables') : gettext(''),
type: this.version < 150000 ? 'select' : 'collection',
controlProps: this.version < 150000 ? { allowClear: true, multiple: true, creatable: true } : null,
options: this.version < 150000 ? this.fieldOptions.allTables : null,
group: this.version < 150000 ? gettext('Definition') : gettext('Tables'), mode: ['edit', 'create'],
deps: ['all_table'], disabled: obj.isAllTable, schema: this.version < 150000 ? null : this.paramSchema,
uniqueCol: this.version < 150000 ? null : ['table_name'],
canAdd: this.version < 150000 ? null : (state)=> !obj.isConnected(state),
canDelete: this.version<150000?null : (state)=> !obj.isConnected(state),
},
{
type: 'nested-fieldset', mode: ['create','edit', 'properties'],
label: gettext('With'), group: gettext('Definition'),
label: gettext('With'), group: this.version < 150000 ? gettext('Definition') : gettext('Options'),
schema : new DefaultWithSchema(this.node_info),
},
];
}
}
}

View File

@ -0,0 +1,26 @@
{### CREATE PUBLICATION (PG >= 15): tables with optional column lists /
     row filters, and TABLES IN SCHEMA. The add_comma_after_* flags mark
     which items in the publish list need a trailing comma. ###}
{% if data.evnt_delete or data.evnt_update or data.evnt_truncate %}
{% set add_comma_after_insert = 'insert' %}
{% endif %}
{% if data.evnt_truncate %}
{% set add_comma_after_delete = 'delete' %}
{% endif %}
{% if data.evnt_delete or data.evnt_truncate%}
{% set add_comma_after_update = 'update' %}
{% endif %}
{% if data.publish_via_partition_root%}
{% set add_comma_after_truncate = 'truncate' %}
{% endif %}
{### Create PUBLICATION ###}
CREATE PUBLICATION {{ conn|qtIdent(data.name) }}
{% if data.all_table %}
FOR ALL TABLES
{% elif data.pubtable or data.pubschema %}
FOR {% if data.pubtable %}TABLE {% if data.only_table%}ONLY {% endif %}{% for pub_table in data.pubtable %}{% if loop.index != 1 %}, {% endif %}{{pub_table['table_name']}}{% if pub_table['columns'] %} ({% for column in pub_table['columns'] %}{% if loop.index != 1 %}, {% endif %}{{column}}{% endfor %}){% endif %}{% if pub_table['where'] %} WHERE ({{pub_table['where']}}){% endif %}{% endfor %}{% endif %}{% if data.pubtable and data.pubschema %},{% endif %}
{% if data.pubschema %}
TABLES IN SCHEMA {% for pub_schema in data.pubschema %}{% if loop.index != 1 %}, {% endif %}{{ pub_schema }}{% endfor %}
{% endif %}
{% endif %}
{### WITH clause: publish list plus publish_via_partition_root ###}
{% if data.evnt_insert or data.evnt_update or data.evnt_delete or data.evnt_truncate %}
WITH (publish = '{% if data.evnt_insert %}insert{% if add_comma_after_insert == 'insert' %}, {% endif %}{% endif %}{% if data.evnt_update %}update{% if add_comma_after_update == 'update' %}, {% endif %}{% endif %}{% if data.evnt_delete %}delete{% if add_comma_after_delete == 'delete' %}, {% endif %}{% endif %}{% if data.evnt_truncate %}truncate{% endif %}', publish_via_partition_root = {{ data.publish_via_partition_root|lower }});
{% endif %}

View File

@ -0,0 +1,5 @@
{### All user-defined schemas: skip pg_* system schemas and information_schema ###}
SELECT nspname
FROM pg_catalog.pg_namespace nsp
WHERE nsp.nspname NOT LIKE 'pg\_%'
  AND nsp.nspname <> 'information_schema'
ORDER BY nspname;

View File

@ -0,0 +1,4 @@
{### Schemas published via TABLES IN SCHEMA by the given publication (PG >= 15) ###}
SELECT n.nspname AS sname
FROM pg_catalog.pg_publication_namespace pubnsp
JOIN pg_catalog.pg_namespace n ON pubnsp.pnnspid = n.oid
WHERE pnpubid = {{pbid}} :: oid;

View File

@ -0,0 +1,6 @@
{### Tables in the publication, each with its column list (unnested from
     pg_publication_rel.prattrs) and row-filter WHERE expression (PG >= 15) ###}
SELECT pg_catalog.quote_ident(n.nspname) || '.' || pg_catalog.quote_ident(cls.relname) AS table_name,
(SELECT array_agg(attname) FROM pg_attribute att WHERE attrelid = prel.prrelid AND attnum IN (SELECT unnest(prattrs) FROM pg_publication_rel WHERE oid = prel.oid ) ) AS columns,
pg_catalog.pg_get_expr(prel.prqual, prel.prrelid) AS where
FROM pg_publication_rel prel
JOIN pg_class cls ON cls.oid = prel.prrelid
JOIN pg_catalog.pg_namespace n ON cls.relnamespace = n.oid WHERE prel.prpubid = {{pbid}} :: oid;

View File

@ -0,0 +1,67 @@
{### ALTER PUBLICATION script: owner, publish options, table/schema
     membership changes and rename. The add_comma_after_* flags mark which
     items in the publish list need a trailing comma. ###}
{% if data.evnt_delete or data.evnt_update or data.evnt_truncate %}
{% set add_comma_after_insert = 'insert' %}
{% endif %}
{% if data.evnt_truncate %}
{% set add_comma_after_delete = 'delete' %}
{% endif %}
{% if data.evnt_delete or data.evnt_truncate%}
{% set add_comma_after_update = 'update' %}
{% endif %}
{### Alter publication owner ###}
{% if data.pubowner %}
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
OWNER TO {{ conn|qtIdent(data.pubowner) }};
{% endif %}
{### Alter publication event ###}
{% if (data.evnt_insert is defined and data.evnt_insert != o_data.evnt_insert) or (data.evnt_update is defined and data.evnt_update != o_data.evnt_update) or (data.evnt_delete is defined and data.evnt_delete != o_data.evnt_delete) or (data.evnt_truncate is defined and data.evnt_truncate != o_data.evnt_truncate) %}
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }} SET
(publish = '{% if data.evnt_insert %}insert{% if add_comma_after_insert == 'insert' %}, {% endif %}{% endif %}{% if data.evnt_update %}update{% if add_comma_after_update == 'update' %}, {% endif %}{% endif %}{% if data.evnt_delete %}delete{% if add_comma_after_delete == 'delete' %}, {% endif %}{% endif %}{% if data.evnt_truncate %}truncate{% endif %}');
{% endif %}
{### Alter publication partition root ###}
{% if data.publish_via_partition_root is defined and data.publish_via_partition_root != o_data.publish_via_partition_root%}
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }} SET
(publish_via_partition_root = {{ data.publish_via_partition_root|lower }});
{% endif %}
{### Alter drop publication table ###}
{% if drop_table %}
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
DROP TABLE {% if data.only_table%}ONLY {% endif %}{% for pub_table in drop_table_data %}{% if loop.index != 1 %}, {% endif %}{{ pub_table['table_name'] or pub_table }}{% endfor %};
{% endif %}
{### Alter drop publication schema ###}
{% if drop_schema %}
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
DROP TABLES IN SCHEMA {% for pub_schema in drop_schema_data %}{% if loop.index != 1 %}, {% endif %}{{ pub_schema }}{% endfor %};
{% endif %}
{### Alter update publication table ###}
{% if update_table %}
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
SET TABLE {% for pub_table in update_table_data %}{% if loop.index != 1 %}, TABLE {% endif %}{{ pub_table['table_name'] or pub_table }}{% if pub_table['columns'] %} ({% for column in pub_table['columns'] %}{% if loop.index != 1 %}, {% endif %}{{ column }}{% endfor %}){% endif %}{% if pub_table['where'] %} WHERE ({{pub_table['where']}}){% endif %}{% endfor %};
{% endif %}
{### Alter publication table ###}
{% if add_table %}
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
ADD TABLE {% if data.only_table%}ONLY {% endif %}{% for pub_table in add_table_data %}{% if loop.index != 1 %}, {% endif %}{{ pub_table['table_name'] or pub_table }}{% if pub_table['columns'] %} ({% for column in pub_table['columns'] %}{% if loop.index != 1 %}, {% endif %}{{ column }}{% endfor %}){% endif %}{% if pub_table['where'] %} WHERE ({{pub_table['where']}}){% endif %}{% endfor %};
{% endif %}
{### Alter add publication schema ###}
{% if add_schema %}
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
ADD TABLES IN SCHEMA {% for pub_schema in add_schema_data %}{% if loop.index != 1 %}, {% endif %}{{ pub_schema }}{% endfor %};
{% endif %}
{### Alter publication name ###}
{% if data.name != o_data.name %}
ALTER PUBLICATION {{ conn|qtIdent(o_data.name) }}
RENAME TO {{ conn|qtIdent(data.name) }};
{% endif %}

View File

@ -0,0 +1,7 @@
-- Columns of table {{ tid }} that may appear in a publication column
-- list (PostgreSQL 15+ feature).
--
-- Filter on attnum > 0 (skip system columns) and NOT attisdropped
-- (skip dropped columns). The previous filter, attstattarget = -1,
-- tested an unrelated field (the per-column statistics target) and
-- wrongly hid any column whose target had been changed with
-- ALTER TABLE ... SET STATISTICS.
SELECT
    pg_catalog.quote_ident(attname) as column
FROM
    pg_catalog.pg_attribute
WHERE
    attrelid = '{{ tid }}' :: regclass
    AND attnum > 0
    AND NOT attisdropped;

View File

@ -1,7 +1,15 @@
SELECT pg_catalog.quote_ident(c.table_schema)||'.'||pg_catalog.quote_ident(c.table_name) AS table
FROM information_schema.tables c
WHERE c.table_type = 'BASE TABLE'
AND c.table_schema NOT LIKE 'pg\_%'
AND c.table_schema NOT LIKE 'pgagent'
AND c.table_schema NOT LIKE 'sys'
AND c.table_schema NOT IN ('information_schema') ORDER BY 1;
-- All user tables selectable for a publication, plus each table's OID
-- (tid) so the dialog can fetch its column list afterwards.
--
-- NOTE: the original used NOT LIKE 'pgagent' / NOT LIKE 'sys'; a LIKE
-- pattern with no wildcard is plain equality, so those literal schema
-- names belong in the NOT IN list. Only 'pg\_%' needs pattern matching.
SELECT
    pg_catalog.quote_ident(c.table_schema) || '.' || pg_catalog.quote_ident(c.table_name) AS table,
    (
        pg_catalog.quote_ident(c.table_schema) || '.' || pg_catalog.quote_ident(c.table_name)
    ) :: regclass :: oid AS tid
FROM
    information_schema.tables c
WHERE
    c.table_type = 'BASE TABLE'
    AND c.table_schema NOT LIKE 'pg\_%'
    AND c.table_schema NOT IN ('pgagent', 'sys', 'information_schema')
ORDER BY
    1;

View File

@ -1,6 +1,4 @@
SELECT pg_catalog.quote_ident(pgb_table.schemaname)||'.'||pg_catalog.quote_ident(pgb_table.tablename)
AS pubtable,
pg_catalog.quote_ident(pgb_table.schemaname)||'.'||pg_catalog.quote_ident(pgb_table.tablename)
AS proptable
FROM pg_catalog.pg_publication_tables pgb_table WHERE pubname = '{{ pname }}'
AND pgb_table.schemaname NOT LIKE 'pgagent';
-- Tables explicitly listed (FOR TABLE) in publication {{ pbid }}.
-- pg_publication_rel records only per-table membership, not
-- FOR ALL TABLES / TABLES IN SCHEMA entries.
--
-- All catalog relations are schema-qualified with pg_catalog, matching
-- the sibling queries; unqualified names resolve via search_path.
SELECT pg_catalog.quote_ident(n.nspname) || '.' || pg_catalog.quote_ident(cls.relname) AS table_name
FROM pg_catalog.pg_publication_rel prel
    JOIN pg_catalog.pg_class cls ON cls.oid = prel.prrelid
    JOIN pg_catalog.pg_namespace n ON cls.relnamespace = n.oid
WHERE prel.prpubid = {{pbid}} :: oid;

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, public.test_table_publication_2
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
ADD TABLE public.test_table_publication_2;

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication_2
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
DROP TABLE public.test_table_publication;

View File

@ -1,2 +1,2 @@
ALTER PUBLICATION alterd_publication_event SET
(publish = 'insert, update', publish_via_partition_root = false);
ALTER PUBLICATION alterd_publication SET
(publish = 'insert, update, delete');

View File

@ -1 +1,2 @@
ALTER PUBLICATION test_publication_to_alter RENAME TO alterd_publication;
ALTER PUBLICATION test_publication_create
RENAME TO alterd_publication;

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -6,7 +6,7 @@
"endpoint": "NODE-table.obj",
"sql_endpoint": "NODE-table.sql_id",
"data": {
"name": "test_publication",
"name": "test_table_publication",
"columns": [
{
"name": "emp_id",
@ -28,6 +28,30 @@
},
"store_object_id": true
},
{
"type": "create",
"name": "Create Second Table For Publication",
"endpoint": "NODE-table.obj",
"sql_endpoint": "NODE-table.sql_id",
"data": {
"name": "test_table_publication_2",
"columns": [
{
"name": "dept_id",
"cltype": "integer",
"is_primary_key": true
},
{
"name": "dept_name",
"cltype": "text"
}
],
"is_partitioned": false,
"schema": "public",
"spcname": "pg_default"
},
"store_object_id": true
},
{
"type": "create",
"name": "Create Publication",
@ -53,27 +77,84 @@
"name": "Alter Publication name",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"name": "alterd_publication"
},
"expected_sql_file": "alter_publication.sql"
"expected_sql_file": "alter_publication.sql",
"expected_msql_file": "alter_publication_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication event",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"evnt_delete": true
},
"expected_sql_file": "alter_publication_event.sql"
"expected_sql_file": "alter_publication_event.sql",
"expected_msql_file": "alter_publication_event_msql.sql"
},
{
"type": "delete",
"name": "Drop publication",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "alterd_publication_event"
"name": "alterd_publication"
}
},
{
"type": "create",
"name": "Create Publication for few tables",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": false,
"only_table": false,
"pubtable": ["public.test_table_publication"]
},
"expected_sql_file": "create_publication_few_tables.sql",
"expected_msql_file": "create_publication_few_tables_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via adding new tables",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": ["public.test_table_publication", "public.test_table_publication_2"]
},
"expected_sql_file": "alter_publication_add_tables.sql",
"expected_msql_file": "alter_publication_add_tables_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via deleting tables",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": ["public.test_table_publication_2"]
},
"expected_sql_file": "alter_publication_drop_tables.sql",
"expected_msql_file": "alter_publication_drop_tables_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few tables",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
}
]

View File

@ -0,0 +1,7 @@
-- Publication: alterd_publication
-- DROP PUBLICATION IF EXISTS alterd_publication;
CREATE PUBLICATION alterd_publication
FOR ALL TABLES
WITH (publish = 'insert, update', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLES IN SCHEMA test_schema_publication, test_schema_publication_2
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
ADD TABLES IN SCHEMA test_schema_publication_2;

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, public.test_table_publication_2
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, public.test_table_publication_2 (dept_id)
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
ADD TABLE public.test_table_publication_2 (dept_id);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, public.test_table_publication_2 (dept_id) WHERE ((dept_id = 2))
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
ADD TABLE public.test_table_publication_2 (dept_id) WHERE (dept_id=2);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
ADD TABLE public.test_table_publication_2;

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, public.test_table_publication_2 WHERE ((dept_id = 2))
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
ADD TABLE public.test_table_publication_2 WHERE (dept_id=2);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLES IN SCHEMA test_schema_publication_2
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
DROP TABLES IN SCHEMA test_schema_publication;

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
DROP TABLE public.test_table_publication_2;

View File

@ -0,0 +1,7 @@
-- Publication: alterd_publication
-- DROP PUBLICATION IF EXISTS alterd_publication;
CREATE PUBLICATION alterd_publication
FOR ALL TABLES
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION alterd_publication SET
(publish = 'insert, update, delete, truncate');

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
RENAME TO alterd_publication;

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, public.test_table_publication_2 (dept_name)
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
SET TABLE public.test_table_publication, TABLE public.test_table_publication_2 (dept_name);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, public.test_table_publication_2 (dept_name) WHERE ((dept_name = 'test'::text))
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
SET TABLE public.test_table_publication, TABLE public.test_table_publication_2 (dept_name) WHERE (dept_name='test');

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, public.test_table_publication_2 WHERE ((dept_name = 'test'::text))
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
SET TABLE public.test_table_publication, TABLE public.test_table_publication_2 WHERE (dept_name='test');

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR ALL TABLES
WITH (publish = 'insert, update', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLES IN SCHEMA test_schema_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLES IN SCHEMA test_schema_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication (emp_id, name)
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication (emp_id, name)
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication (emp_id, name) WHERE (((emp_id = 2) AND (name = 'test'::text)))
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication (emp_id, name) WHERE (emp_id=2 and name='test')
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLE ONLY public.test_table_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, TABLES IN SCHEMA test_schema_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, TABLES IN SCHEMA test_schema_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication WHERE (((emp_id = 2) AND (name = 'test'::text)))
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication WHERE (emp_id=2 and name='test')
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication WHERE (((emp_id = 2) AND (name = 'test'::text))), TABLES IN SCHEMA test_schema_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication WHERE (emp_id=2 and name='test'), TABLES IN SCHEMA test_schema_publication
WITH (publish = 'insert, update, delete, truncate', publish_via_partition_root = false);

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR ALL TABLES
WITH (publish = 'insert, update', publish_via_partition_root = false);

View File

@ -0,0 +1,582 @@
{
"scenarios": [
{
"type": "create",
"name": "Create Table For Publication",
"endpoint": "NODE-table.obj",
"sql_endpoint": "NODE-table.sql_id",
"data": {
"name": "test_table_publication",
"columns": [
{
"name": "emp_id",
"cltype": "integer",
"is_primary_key": true
},
{
"name": "name",
"cltype": "text"
},
{
"name": "salary",
"cltype": "bigint"
}
],
"is_partitioned": false,
"schema": "public",
"spcname": "pg_default"
},
"store_object_id": true
},
{
"type": "create",
"name": "Create Second Table For Publication",
"endpoint": "NODE-table.obj",
"sql_endpoint": "NODE-table.sql_id",
"data": {
"name": "test_table_publication_2",
"columns": [
{
"name": "dept_id",
"cltype": "integer",
"is_primary_key": true
},
{
"name": "dept_name",
"cltype": "text"
}
],
"is_partitioned": false,
"schema": "public",
"spcname": "pg_default"
},
"store_object_id": true
},
{
"type": "create",
"name": "Create Schema For Publication",
"endpoint": "NODE-schema.obj",
"sql_endpoint": "NODE-schema.sql_id",
"data": {
"name": "test_schema_publication"
},
"store_object_id": true
},
{
"type": "create",
"name": "Create Second Schema For Publication",
"endpoint": "NODE-schema.obj",
"sql_endpoint": "NODE-schema.sql_id",
"data": {
"name": "test_schema_publication_2"
},
"store_object_id": true
},
{
"type": "create",
"name": "Create Publication for all tables with insert and update",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": true,
"only_table": false,
"pubtable": "",
"pubschema": ""
},
"expected_sql_file": "create_publication.sql",
"expected_msql_file": "create_publication_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication name for all tables",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"name": "alterd_publication"
},
"expected_sql_file": "alter_publication.sql",
"expected_msql_file": "alter_publication_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication event for all tables",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"evnt_delete": true,
"evnt_truncate": true
},
"expected_sql_file": "alter_publication_event.sql",
"expected_msql_file": "alter_publication_event_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for all tables",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "alterd_publication"
}
},
{
"type": "create",
"name": "Create Publication for few tables",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": false,
"only_table": false,
"pubtable": [{
"table_name": "public.test_table_publication"
}],
"pubschema": ""
},
"expected_sql_file": "create_publication_few_tables.sql",
"expected_msql_file": "create_publication_few_tables_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via adding new tables",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"added": [{
"table_name": "public.test_table_publication_2"
}]
}
},
"expected_sql_file": "alter_publication_add_tables.sql",
"expected_msql_file": "alter_publication_add_tables_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via deleting tables with",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"deleted": [{
"table_name": "public.test_table_publication_2"
}]
}
},
"expected_sql_file": "alter_publication_drop_tables.sql",
"expected_msql_file": "alter_publication_drop_tables_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via adding new tables with columns",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"added": [{
"table_name": "public.test_table_publication_2",
"columns": ["dept_id"]
}]
}
},
"expected_sql_file": "alter_publication_add_tables_columns.sql",
"expected_msql_file": "alter_publication_add_tables_columns_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via updating tables with columns",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"changed": [{
"table_name": "public.test_table_publication_2",
"columns": ["dept_name"]
}]
}
},
"expected_sql_file": "alter_publication_set_tables_columns.sql",
"expected_msql_file": "alter_publication_set_tables_columns_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via deleting tables with",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"deleted": [{
"table_name": "public.test_table_publication_2"
}]
}
},
"expected_sql_file": "alter_publication_drop_tables.sql",
"expected_msql_file": "alter_publication_drop_tables_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via adding new tables with where",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"added": [{
"table_name": "public.test_table_publication_2",
"where": "dept_id=2"
}]
}
},
"expected_sql_file": "alter_publication_add_tables_where.sql",
"expected_msql_file": "alter_publication_add_tables_where_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via updating tables with where",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"changed": [{
"table_name": "public.test_table_publication_2",
"where": "dept_name='test'"
}]
}
},
"expected_sql_file": "alter_publication_set_tables_where.sql",
"expected_msql_file": "alter_publication_set_tables_where_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via deleting tables with",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"deleted": [{
"table_name": "public.test_table_publication_2"
}]
}
},
"expected_sql_file": "alter_publication_drop_tables.sql",
"expected_msql_file": "alter_publication_drop_tables_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via adding new tables with columns and where",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"added": [{
"table_name": "public.test_table_publication_2",
"columns": ["dept_id"],
"where": "dept_id=2"
}]
}
},
"expected_sql_file": "alter_publication_add_tables_columns_where.sql",
"expected_msql_file": "alter_publication_add_tables_columns_where_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via updating tables with columns and where",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": {
"changed": [{
"table_name": "public.test_table_publication_2",
"columns": ["dept_name"],
"where": "dept_name='test'"
}]
}
},
"expected_sql_file": "alter_publication_set_tables_columns_where.sql",
"expected_msql_file": "alter_publication_set_tables_columns_where_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few tables",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
},
{
"type": "create",
"name": "Create Publication for few schemas",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": false,
"only_table": false,
"pubtable": "",
"pubschema": ["test_schema_publication"]
},
"expected_sql_file": "create_publication_few_schemas.sql",
"expected_msql_file": "create_publication_few_schemas_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few schemas via adding new schemas",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubschema": ["test_schema_publication","test_schema_publication_2"]
},
"expected_sql_file": "alter_publication_add_schemas.sql",
"expected_msql_file": "alter_publication_add_schemas_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few schemas via deleting schemas",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubschema": ["test_schema_publication_2"]
},
"expected_sql_file": "alter_publication_drop_schemas.sql",
"expected_msql_file": "alter_publication_drop_schemas_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few schemas",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
},
{
"type": "create",
"name": "Create Publication for few tables with only",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": false,
"only_table": true,
"pubtable": [{
"table_name": "public.test_table_publication"
}],
"pubschema": ""
},
"expected_sql_file": "create_publication_few_tables_only.sql",
"expected_msql_file": "create_publication_few_tables_only_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few tables with only",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
},
{
"type": "create",
"name": "Create Publication for few tables and few schemas",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": false,
"only_table": false,
"pubtable": [{
"table_name": "public.test_table_publication"
}],
"pubschema": ["test_schema_publication"]
},
"expected_sql_file": "create_publication_few_tables_schemas.sql",
"expected_msql_file": "create_publication_few_tables_schemas_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few tables and few schemas",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
},
{
"type": "create",
"name": "Create Publication for few tables with columns",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": false,
"only_table": false,
"pubtable": [{
"table_name": "public.test_table_publication",
"columns": ["emp_id", "name"]
}],
"pubschema": ""
},
"expected_sql_file": "create_publication_few_tables_columns.sql",
"expected_msql_file": "create_publication_few_tables_columns_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few tables with columns",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
},
{
"type": "create",
"name": "Create Publication for few tables with where",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": false,
"only_table": false,
"pubtable": [{
"table_name": "public.test_table_publication",
"where": "emp_id=2 and name='test'"
}],
"pubschema": ""
},
"expected_sql_file": "create_publication_few_tables_where.sql",
"expected_msql_file": "create_publication_few_tables_where_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few tables with where",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
},
{
"type": "create",
"name": "Create Publication for few tables with columns and where",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": false,
"only_table": false,
"pubtable": [{
"table_name": "public.test_table_publication",
"columns": ["emp_id", "name"],
"where": "emp_id=2 and name='test'"
}],
"pubschema": ""
},
"expected_sql_file": "create_publication_few_tables_columns_where.sql",
"expected_msql_file": "create_publication_few_tables_columns_where_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few tables with columns and where",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
},
{
"type": "create",
"name": "Create Publication for few tables with where and few schemas",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"publish_via_partition_root": false,
"all_table": false,
"only_table": false,
"pubtable": [{
"table_name": "public.test_table_publication",
"where": "emp_id=2 and name='test'"
}],
"pubschema": ["test_schema_publication"]
},
"expected_sql_file": "create_publication_few_tables_where_schemas.sql",
"expected_msql_file": "create_publication_few_tables_where_schemas_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few tables with where and few schemas",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
}
]
}

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication, public.test_table_publication_2
WITH (publish = 'insert, update, delete, truncate');

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
ADD TABLE public.test_table_publication_2;

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication_2
WITH (publish = 'insert, update, delete, truncate');

View File

@ -0,0 +1,2 @@
ALTER PUBLICATION test_publication_create
DROP TABLE public.test_table_publication;

View File

@ -1,2 +1,2 @@
ALTER PUBLICATION alterd_publication_event SET
(publish = 'insert, update');
ALTER PUBLICATION alterd_publication SET
(publish = 'insert, update, delete');

View File

@ -1 +1,2 @@
ALTER PUBLICATION test_publication_to_alter RENAME TO alterd_publication;
ALTER PUBLICATION test_publication_create
RENAME TO alterd_publication;

View File

@ -0,0 +1,7 @@
-- Publication: test_publication_create
-- DROP PUBLICATION IF EXISTS test_publication_create;
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication
WITH (publish = 'insert, update, delete, truncate');

View File

@ -0,0 +1,3 @@
CREATE PUBLICATION test_publication_create
FOR TABLE public.test_table_publication
WITH (publish = 'insert, update, delete, truncate');

View File

@ -6,7 +6,7 @@
"endpoint": "NODE-table.obj",
"sql_endpoint": "NODE-table.sql_id",
"data": {
"name": "test_publication",
"name": "test_table_publication",
"columns": [
{
"name": "emp_id",
@ -28,6 +28,30 @@
},
"store_object_id": true
},
{
"type": "create",
"name": "Create Second Table For Publication",
"endpoint": "NODE-table.obj",
"sql_endpoint": "NODE-table.sql_id",
"data": {
"name": "test_table_publication_2",
"columns": [
{
"name": "dept_id",
"cltype": "integer",
"is_primary_key": true
},
{
"name": "dept_name",
"cltype": "text"
}
],
"is_partitioned": false,
"schema": "public",
"spcname": "pg_default"
},
"store_object_id": true
},
{
"type": "create",
"name": "Create Publication",
@ -52,27 +76,83 @@
"name": "Alter Publication name",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"name": "alterd_publication"
},
"expected_sql_file": "alter_publication.sql"
"expected_sql_file": "alter_publication.sql",
"expected_msql_file": "alter_publication_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication event",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"evnt_delete": true
},
"expected_sql_file": "alter_publication_event.sql"
"expected_sql_file": "alter_publication_event.sql",
"expected_msql_file": "alter_publication_event_msql.sql"
},
{
"type": "delete",
"name": "Drop publication",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "alterd_publication_event"
"name": "alterd_publication"
}
},
{
"type": "create",
"name": "Create Publication for few tables",
"endpoint": "NODE-publication.obj",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql",
"data": {
"name": "test_publication_create",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": true,
"evnt_truncate": true,
"pubowner": "postgres",
"all_table": false,
"only_table": false,
"pubtable": ["public.test_table_publication"]
},
"expected_sql_file": "create_publication_few_tables.sql",
"expected_msql_file": "create_publication_few_tables_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via adding new tables",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": ["public.test_table_publication", "public.test_table_publication_2"]
},
"expected_sql_file": "alter_publication_add_tables.sql",
"expected_msql_file": "alter_publication_add_tables_msql.sql"
},
{
"type": "alter",
"name": "Alter Publication for few tables via deleting tables",
"endpoint": "NODE-publication.obj_id",
"sql_endpoint": "NODE-publication.sql_id",
"msql_endpoint": "NODE-publication.msql_id",
"data": {
"pubtable": ["public.test_table_publication_2"]
},
"expected_sql_file": "alter_publication_drop_tables.sql",
"expected_msql_file": "alter_publication_drop_tables_msql.sql"
},
{
"type": "delete",
"name": "Drop publication for few tables",
"endpoint": "NODE-publication.delete_id",
"data": {
"name": "test_publication_create"
}
}
]

View File

@ -1,7 +1,7 @@
{
"add_publication": [
{
"name": "Create publication with insert and update",
"name": "Create publication with insert and update and all tables",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"test_data": {
@ -13,7 +13,8 @@
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": true,
"pubtable": ""
"pubtable": "",
"pubschema": ""
},
"mocking_required": false,
"mock_data": {},
@ -35,7 +36,8 @@
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER"
"pubtable": "PLACE_HOLDER",
"pubschema": ""
},
"mocking_required": false,
"mock_data": {},
@ -43,6 +45,160 @@
"status_code": 200
}
},
{
"name": "Create publication for few schemas",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"few_schemas": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACEHOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "",
"pubschema": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "Create publication for few schemas and few tables",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"few_schemas": true,
"few_tables": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACEHOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "Create publication for few tables and where clause",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"few_tables": true,
"with_where": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACEHOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": ""
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "Create publication for few tables and columns",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"few_tables": true,
"with_columns": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACEHOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": ""
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "Create publication for few tables and columns and where clause",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"few_tables": true,
"with_columns": true,
"with_where": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACEHOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": ""
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "Create publication for few schemas and few tables and columns",
"url": "/browser/publication/obj/",
"is_positive_test": false,
"few_schemas": true,
"few_tables": true,
"with_columns": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACEHOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "PLACE_HOLDER"
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(False, 'Mocked Internal Server Error')"
},
"expected_data": {
"status_code": 500
}
},
{
"name": "Create a publication without name",
"url": "/browser/publication/obj/",
@ -56,7 +212,8 @@
"evnt_truncate": false,
"pubowner": "postgres",
"all_table": true,
"pubtable": ""
"pubtable": "",
"pubschema": ""
},
"mocking_required": false,
"mock_data": {},
@ -64,6 +221,30 @@
"status_code": 410
}
},
{
"name": "Exception while adding a publication",
"url": "/browser/publication/obj/",
"is_positive_test": false,
"test_data": {
"name": "PLACEHOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"pubowner": "postgres",
"all_table": true,
"pubtable": "",
"pubschema": ""
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(True, True)(False, 'Mocked Internal Server Error ')"
},
"expected_data": {
"status_code": 500
}
},
{
"name": "Error while adding a publication",
"url": "/browser/publication/obj/",
@ -77,7 +258,8 @@
"evnt_truncate": false,
"pubowner": "postgres",
"all_table": true,
"pubtable": ""
"pubtable": "",
"pubschema": ""
},
"mocking_required": true,
"mock_data": {
@ -87,29 +269,6 @@
"expected_data": {
"status_code": 500
}
},
{
"name": "Exception while adding a publication",
"url": "/browser/publication/obj/",
"is_positive_test": false,
"test_data": {
"name": "PLACEHOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"pubowner": "postgres",
"all_table": true,
"pubtable": ""
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
"return_value": "(True, True)(False, 'Mocked Internal Server Error ')"
},
"expected_data": {
"status_code": 500
}
}
],
"get_publication": [
@ -275,6 +434,15 @@
"update_name": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
@ -290,7 +458,16 @@
"owner_publication": true,
"test_data": {
"id": "PLACE_HOLDER",
"evnt_insert": "PLACEHOLDER"
"name": "PLACE_HOLDER",
"evnt_insert": "PLACEHOLDER",
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "",
"pubschema": ""
},
"mocking_required": false,
"mock_data": {},
@ -305,7 +482,16 @@
"owner_publication": true,
"test_data": {
"id": "PLACE_HOLDER",
"evnt_delete": "PLACEHOLDER"
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": "PLACE_HOLDER",
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "",
"pubschema": ""
},
"mocking_required": false,
"mock_data": {},
@ -320,7 +506,16 @@
"mocking_required": true,
"test_data": {
"name": "PLACE_HOLDER",
"id": "PLACE_HOLDER"
"id": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "",
"pubschema": ""
},
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg3.connection.Connection.execute_dict",
@ -337,7 +532,17 @@
"wrong_publication_id": true,
"mocking_required": false,
"test_data": {
"id": "PLACE_HOLDER"
"id": "PLACE_HOLDER",
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "",
"pubschema": ""
},
"mock_data": {},
"expected_data": {
@ -345,6 +550,438 @@
}
}
],
"update_publication_add_table": [
{
"name": "update a publication via adding a table",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"add_table": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via adding a table with where and columns",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"add_table_with_where_columns": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via adding a table with columns",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"add_table_with_columns": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via adding a table with where",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"add_table_with_where": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
}
],
"update_publication_update_table": [
{
"name": "update a publication via updating a table columns",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"update_table_with_columns": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via updating a table where clause",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"update_table_with_where": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via updating a table columns and where clause",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"update_table_with_where_columns": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via updating old new tables with columns",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"update_old_new_tables_with_columns": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via updating old new tables with where clause",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"update_old_new_tables_with_where": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via updating old new tables with columns and where clause",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"update_old_new_tables_with_columns_where": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via updating old new tables with where clause and schema",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"update_old_new_tables_with_where_schema": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "PLACE_HOLDER",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
}
],
"update_publication_drop_table": [
{
"name": "update a publication via dropping a table",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"drop_tables": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via dropping a table with columns",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"drop_tables_with_columns": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
},
{
"name": "update a publication via dropping a table with where clause",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"drop_tables_with_where": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "PLACE_HOLDER",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
}
],
"update_publication_add_schema": [
{
"name": "update a publication via adding a schema",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "",
"pubschema": "",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
}
],
"update_publication_update_schema": [
{
"name": "update a publication via updating old new schemas",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "",
"pubschema": "PLACE_HOLDER",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
}
],
"update_publication_drop_schema": [
{
"name": "update a publication via dropping a schema",
"url": "/browser/publication/obj/",
"is_positive_test": true,
"compatible_sversion": true,
"test_data": {
"name": "PLACE_HOLDER",
"evnt_insert": true,
"evnt_update": true,
"evnt_delete": false,
"evnt_truncate": false,
"publish_via_partition_root": false,
"pubowner": "postgres",
"all_table": false,
"pubtable": "",
"pubschema": "PLACE_HOLDER",
"id": "PLACE_HOLDER"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200
}
}
],
"delete_multiple_publication": [
{
"name": "Delete multiple publication",

View File

@ -41,6 +41,10 @@ class PublicationsAddTestCase(BaseTestGenerator):
"for server version less than 10"
)
if self.server_version < 150000 and \
hasattr(self, 'compatible_sversion'):
self.skipTest("The version is not compatible for"
" the current test case")
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
@ -48,13 +52,15 @@ class PublicationsAddTestCase(BaseTestGenerator):
raise Exception(
"Could not connect to database to add a publication.")
if self.is_positive_test and hasattr(self, 'few_tables'):
if hasattr(self, 'few_tables'):
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
self.table_id = tables_utils. \
create_table(self.server, self.db_name, self.schema_name,
self.table_name)
self.test_data['pubtable'] = publication_utils.get_tables(self)
if self.server_version >= 150000 and hasattr(self, 'few_schemas'):
self.test_data['pubschema'] = publication_utils.get_schemas(self)
def runTest(self):
"""This function will publication."""
@ -68,6 +74,8 @@ class PublicationsAddTestCase(BaseTestGenerator):
if hasattr(self, 'without_name'):
del data["name"]
response = self.create_publication()
elif hasattr(self, 'with_columns'):
response = self.create_publication()
elif hasattr(self, 'error_creating_publication'):
with patch(self.mock_data["function_name"],
return_value=eval(self.mock_data["return_value"])):

View File

@ -54,9 +54,7 @@ class PublicationDeleteTestCase(BaseTestGenerator):
str(uuid.uuid4())[1:8])
self.publication_id = \
publication_utils.create_publication(self.server,
self.db_name,
self.publication_name)
publication_utils.create_publication(self)
def delete_publication(self):
return self.tester.delete(

View File

@ -61,9 +61,8 @@ class PublicationDeleteTestCases(BaseTestGenerator):
self.publication_name_1 = "test_publication_delete_%s" % (
str(uuid.uuid4())[1:8])
self.publication_ids = [
publication_utils.create_publication(self.server, self.db_name,
self.publication_name),
publication_utils.create_publication(self.server, self.db_name,
publication_utils.create_publication(self),
publication_utils.create_publication(self,
self.publication_name_1),
]

View File

@ -60,9 +60,7 @@ class PublicationGetTestCase(BaseTestGenerator):
self.publication_name = "test_publication_get_%s" % (
str(uuid.uuid4())[1:8])
self.publication_id = publication_utils. \
create_publication(self.server,
self.db_name,
self.publication_name)
create_publication(self)
def get_publication(self):
return self.tester.get(

View File

@ -41,35 +41,16 @@ class PublicationUpdateTestCase(BaseTestGenerator):
"for server version less than 10"
)
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception(
"Could not connect to database to delete publication.")
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to delete publication.")
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
self.server_version = schema_info["server_version"]
if self.server_version < 99999:
self.skipTest(
"Logical replication is not supported "
"for server version less than 10"
)
self.table_id = tables_utils.create_table(self.server, self.db_name,
self.schema_name,
self.table_name)
self.publication_name = "test_publication_update_%s" % (
self.test_data['name'] = "test_publication_add_%s" % (
str(uuid.uuid4())[1:8])
self.publication_id = \
publication_utils.create_publication(self.server, self.db_name,
self.publication_name)
self.publication_id = publication_utils.create_publication(self)
def update_publication(self, data):
return self.tester.put(
@ -86,22 +67,19 @@ class PublicationUpdateTestCase(BaseTestGenerator):
publication_name = publication_utils. \
verify_publication(self.server,
self.db_name,
self.publication_name)
self.test_data['name'])
if not publication_name:
raise Exception("Could not find the publication to update.")
if hasattr(self, "update_name"):
self.test_data['name'] = "test_publication_update_%s" % (
str(uuid.uuid4())[1:8])
else:
self.test_data['name'] = self.publication_name
self.test_data['id'] = self.publication_id
if not publication_name:
raise Exception("Could not find the publication to update.")
self.test_data['id'] = self.publication_id
if self.is_positive_test:
if hasattr(self, "wrong_publication_id"):
self.publication_id = 9999
if hasattr(self, "plid_none"):
self.publication_id = ''
response = self.update_publication(self.test_data)
else:
with patch(self.mock_data["function_name"],
@ -114,13 +92,6 @@ class PublicationUpdateTestCase(BaseTestGenerator):
self.expected_data["status_code"])
def tearDown(self):
connection = utils.get_db_connection(self.server['db'],
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'],
self.server['sslmode'])
publication_utils.delete_publication(self.server, self.db_name,
self.test_data['name'])

View File

@ -0,0 +1,89 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2023, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
import uuid
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as publication_utils
class PublicationUpdateAddSchemaTestCase(BaseTestGenerator):
    """Scenario tests that update a publication by attaching schemas.

    Each scenario (from the ``update_publication_add_schema`` section of the
    publication test-case JSON) creates a throw-away publication, then PUTs
    an update whose ``pubschema`` list is filled in at run time.
    """
    scenarios = utils.generate_scenarios('update_publication_add_schema',
                                         publication_utils.test_cases)

    def setUp(self):
        """Connect to the test database and create the publication to update."""
        super().setUp()
        node_info = parent_node_dict["schema"][-1]
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        self.server_id = node_info["server_id"]
        self.db_id = node_info["db_id"]
        self.server_version = node_info["server_version"]
        # Publications need logical replication, i.e. PostgreSQL 10 or newer.
        if self.server_version < 99999:
            self.skipTest(
                "Logical replication is not supported "
                "for server version less than 10"
            )
        # Schema-level publications are a PostgreSQL 15 feature.
        if hasattr(self, 'compatible_sversion') and \
                self.server_version < 150000:
            self.skipTest("The version is not compatible for "
                          "the current test case")
        connection = database_utils.connect_database(
            self, utils.SERVER_GROUP, self.server_id, self.db_id)
        if not connection['data']["connected"]:
            raise Exception(
                "Could not connect to database to delete publication.")
        unique_suffix = str(uuid.uuid4())[1:8]
        self.test_data['name'] = "test_publication_add_%s" % unique_suffix
        self.publication_id = publication_utils.create_publication(self)

    def update_publication(self, data):
        """PUT *data* against the publication object endpoint."""
        object_url = self.url + str(utils.SERVER_GROUP)
        for url_part in (self.server_id, self.db_id, self.publication_id):
            object_url = object_url + '/' + str(url_part)
        return self.tester.put(object_url,
                               data=json.dumps(data),
                               follow_redirects=True)

    def runTest(self):
        """This function will update the publication."""
        if not publication_utils.verify_publication(self.server,
                                                    self.db_name,
                                                    self.test_data['name']):
            raise Exception("Could not find the publication to update.")
        self.test_data['id'] = self.publication_id
        if self.is_positive_test:
            # Switch the publication from table-based to schema-based.
            self.test_data['pubtable'] = ''
            self.test_data['pubschema'] = \
                publication_utils.get_schemas(self)
            response = self.update_publication(self.test_data)
        self.assertEqual(response.status_code,
                         self.expected_data["status_code"])

    def tearDown(self):
        """Drop the test publication and disconnect from the database."""
        publication_utils.delete_publication(self.server, self.db_name,
                                             self.test_data['name'])
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -0,0 +1,116 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2023, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
import uuid
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as publication_utils
class PublicationUpdateAddTableTestCase(BaseTestGenerator):
    """This class will update the publication.

    Scenario-driven: each entry of the ``update_publication_add_table``
    section of the publication test-case JSON becomes one test.  Flags set
    on the scenario (``add_table``, ``add_table_with_where``,
    ``add_table_with_columns``, ``add_table_with_where_columns``) select
    which PUT payload is built in :meth:`runTest`.
    """
    scenarios = utils.generate_scenarios('update_publication_add_table',
                                         publication_utils.test_cases)

    def setUp(self):
        # Connect to the test database, create a base table and a
        # publication on it; runTest then updates that publication.
        super().setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        self.server_version = schema_info["server_version"]
        self.schema_name = schema_info["schema_name"]
        # Publications need logical replication, i.e. PostgreSQL 10+.
        if self.server_version < 99999:
            self.skipTest(
                "Logical replication is not supported "
                "for server version less than 10"
            )
        # Scenarios flagged 'compatible_sversion' exercise PG 15-only
        # features (per-table column lists / WHERE clauses).
        if self.server_version < 150000 and \
                hasattr(self, 'compatible_sversion'):
            self.skipTest("The version is not compatible for "
                          "the current test case")
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception(
                "Could not connect to database to delete publication.")
        # Helper creates a fresh table and records its name on
        # self.table_name (presumably — confirm against utils).
        publication_utils.create_table_for_publication(self)
        self.test_data['pubtable'] = publication_utils.get_tables(self)
        self.test_data['name'] = "test_publication_add_%s" % (
            str(uuid.uuid4())[1:8])
        self.publication_id = publication_utils.create_publication(self)

    def update_publication(self, data):
        # PUT the payload against the publication object endpoint.
        return self.tester.put(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(
                self.db_id) +
            '/' + str(self.publication_id),
            data=json.dumps(data),
            follow_redirects=True)

    def runTest(self):
        """This function will update the publication."""
        publication_name = publication_utils. \
            verify_publication(self.server,
                               self.db_name,
                               self.test_data['name'])
        if not publication_name:
            raise Exception("Could not find the publication to update.")
        self.test_data['id'] = self.publication_id
        if self.is_positive_test:
            # Each branch below first creates a brand-new table (the helper
            # overwrites self.table_name), then builds the matching
            # {'added': [...]} payload for that table.
            if hasattr(self, 'add_table'):
                publication_utils.create_table_for_publication(self)
                if self.server_version >= 150000:
                    # PG 15+ expects the changed-table dict format.
                    self.test_data['pubtable'] = {'added': [
                        {'table_name': self.schema_name + '.' + self.table_name
                         }]}
                else:
                    # Older servers take a plain list of all tables.
                    self.test_data['pubtable'] = publication_utils.get_tables(
                        self)
            if hasattr(self, 'add_table_with_where'):
                publication_utils.create_table_for_publication(self)
                self.test_data['pubtable'] = {'added': [
                    {'table_name': self.schema_name + '.' + self.table_name,
                     'where': 'id=2'}]}
            if hasattr(self, 'add_table_with_columns'):
                publication_utils.create_table_for_publication(self)
                # Publish only the first two columns of the new table.
                columns = publication_utils.get_all_columns(self)
                self.test_data['pubtable'] = {'added': [
                    {'table_name': self.schema_name + '.' + self.table_name,
                     "columns": [columns[0]['value'], columns[1]['value']]}]}
            if hasattr(self, 'add_table_with_where_columns'):
                publication_utils.create_table_for_publication(self)
                # Column list and row filter combined on one table.
                columns = publication_utils.get_all_columns(self)
                self.test_data['pubtable'] = {'added': [
                    {'table_name': self.schema_name + '.' + self.table_name,
                     "columns": [columns[0]['value'],
                                 columns[1]['value']], 'where': 'id=2'}]}
            response = self.update_publication(self.test_data)
        self.assertEqual(response.status_code,
                         self.expected_data["status_code"])

    def tearDown(self):
        # Drop the publication created in setUp, then release the DB
        # connection so later test modules start clean.
        publication_utils.delete_publication(self.server, self.db_name,
                                             self.test_data['name'])
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -0,0 +1,100 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2023, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
import uuid
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as publication_utils
class PublicationUpdateDropSchemaTestCase(BaseTestGenerator):
    """This class will update the publication.

    Creates a publication covering schemas (PostgreSQL 15 'FOR TABLES IN
    SCHEMA'), then shrinks its schema list via the REST API.
    """
    # One test instance per scenario from publication_test_data.json under
    # the 'update_publication_drop_schema' key.
    scenarios = utils.generate_scenarios('update_publication_drop_schema',
                                         publication_utils.test_cases)

    def setUp(self):
        """Connect to the regression database, create an extra schema and
        a schema-level publication to be updated."""
        super().setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        self.server_version = schema_info["server_version"]
        # Logical replication needs PostgreSQL 10+ (threshold 99999 still
        # excludes 9.6 at 90600 and admits 10 at 100000).
        if self.server_version < 99999:
            self.skipTest(
                "Logical replication is not supported "
                "for server version less than 10"
            )
        # Schema publications exist only on PG 15+.
        if self.server_version < 150000 and \
                hasattr(self, 'compatible_sversion'):
            self.skipTest("The version is not compatible for "
                          "the current test case")
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            # NOTE(review): message says "delete publication" but this test
            # updates one -- likely copy/paste.
            raise Exception(
                "Could not connect to database to delete publication.")
        # Create a throwaway schema so the publication has more than one
        # schema to drop from.
        self.schema_name = "schema_get_%s" % (str(uuid.uuid4())[1:8])
        connection = utils.get_db_connection(self.db_name,
                                             self.server['username'],
                                             self.server['db_password'],
                                             self.server['host'],
                                             self.server['port'],
                                             self.server['sslmode'])
        self.schema_details = schema_utils.create_schema(connection,
                                                         self.schema_name)
        self.test_data['pubschema'] = publication_utils.get_schemas(self)
        self.test_data['name'] = "test_publication_add_%s" % (
            str(uuid.uuid4())[1:8])
        self.publication_id = publication_utils.create_publication(self)

    def update_publication(self, data):
        """Issue the REST PUT updating the publication; returns the Flask
        test-client response."""
        return self.tester.put(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(
                self.db_id) +
            '/' + str(self.publication_id),
            data=json.dumps(data),
            follow_redirects=True)

    def runTest(self):
        """This function will update the publication."""
        publication_name = publication_utils. \
            verify_publication(self.server,
                               self.db_name,
                               self.test_data['name'])
        if not publication_name:
            raise Exception("Could not find the publication to update.")
        self.test_data['id'] = self.publication_id
        if self.is_positive_test:
            # Keep only the first schema -- every other schema in the
            # publication is dropped by the PUT.
            schemas = publication_utils.get_schemas(self)
            self.test_data['pubschema'] = [schemas[0]]
        response = self.update_publication(self.test_data)
        self.assertEqual(response.status_code,
                         self.expected_data["status_code"])

    def tearDown(self):
        """Drop the publication and release the database connection."""
        publication_utils.delete_publication(self.server, self.db_name,
                                             self.test_data['name'])
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -0,0 +1,121 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2023, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
import uuid
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as publication_utils
class PublicationUpdateDropTableTestCase(BaseTestGenerator):
    """This class will update the publication.

    Creates a publication over one or two tables, then removes tables (or
    individual columns / row filters, PG 15+) via the REST API.
    """
    # One test instance per scenario from publication_test_data.json under
    # the 'update_publication_drop_table' key.
    scenarios = utils.generate_scenarios('update_publication_drop_table',
                                         publication_utils.test_cases)

    def setUp(self):
        """Connect to the regression database and create the table(s) and
        publication that runTest will update."""
        super().setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        self.server_version = schema_info["server_version"]
        self.schema_name = schema_info["schema_name"]
        # Logical replication needs PostgreSQL 10+ (threshold 99999 still
        # excludes 9.6 at 90600 and admits 10 at 100000).
        if self.server_version < 99999:
            self.skipTest(
                "Logical replication is not supported "
                "for server version less than 10"
            )
        # Scenarios tagged 'compatible_sversion' need PG 15 features.
        if self.server_version < 150000 and \
                hasattr(self, 'compatible_sversion'):
            self.skipTest("The version is not compatible for "
                          "the current test case")
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception(
                "Could not connect to database to delete publication.")
        self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)
        # Drop scenarios need a second table so the publication still has
        # one left after the drop; self.table_name/table_id end up pointing
        # at the table that will be dropped.
        if hasattr(self, 'drop_tables') or \
                hasattr(self, 'drop_tables_with_columns'):
            self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
            self.table_id = \
                tables_utils.create_table(self.server,
                                          self.db_name,
                                          self.schema_name,
                                          self.table_name)
        self.test_data['pubtable'] = publication_utils.get_tables(self)
        self.test_data['name'] = "test_publication_add_%s" % (
            str(uuid.uuid4())[1:8])
        self.publication_id = publication_utils.create_publication(self)

    def update_publication(self, data):
        """Issue the REST PUT updating the publication; returns the Flask
        test-client response."""
        return self.tester.put(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(
                self.db_id) +
            '/' + str(self.publication_id),
            data=json.dumps(data),
            follow_redirects=True)

    def runTest(self):
        """This function will update the publication."""
        publication_name = publication_utils. \
            verify_publication(self.server,
                               self.db_name,
                               self.test_data['name'])
        if not publication_name:
            raise Exception("Could not find the publication to update.")
        self.test_data['id'] = self.publication_id
        if self.is_positive_test:
            if hasattr(self, 'drop_tables'):
                if self.server_version >= 150000:
                    # PG 15+ payload: tables to remove go under 'deleted'.
                    self.test_data['pubtable'] = {'deleted': [
                        {'table_name': self.schema_name + '.' + self.table_name
                         }]}
                else:
                    # Pre-15 payload: send the reduced plain name list.
                    tables = publication_utils.get_tables(self)
                    self.test_data['pubtable'] = [tables[0]]
            if hasattr(self, 'drop_tables_with_columns'):
                # Remove a column from the table's published column list.
                columns = publication_utils.get_all_columns(self)
                self.test_data['pubtable'] = {'deleted': [
                    {'table_name': self.schema_name + '.' + self.table_name,
                     "columns": [columns[0]['value']]}]}
            if hasattr(self, 'drop_tables_with_where'):
                # Remove a table that carries a row filter.
                self.test_data['pubtable'] = {'deleted': [
                    {'table_name': self.schema_name + '.' + self.table_name,
                     "where": 'id=2'}]}
        response = self.update_publication(self.test_data)
        self.assertEqual(response.status_code,
                         self.expected_data["status_code"])

    def tearDown(self):
        """Drop the publication and release the database connection."""
        publication_utils.delete_publication(self.server, self.db_name,
                                             self.test_data['name'])
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -0,0 +1,101 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2023, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
import uuid
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as publication_utils
class PublicationUpdateSchemaUpdateTestCase(BaseTestGenerator):
    """This class will update the publication.

    Creates a schema-level publication (PostgreSQL 15 'FOR TABLES IN
    SCHEMA'), then replaces its schema list with a freshly created schema.
    """
    # One test instance per scenario from publication_test_data.json under
    # the 'update_publication_update_schema' key.
    scenarios = utils.generate_scenarios('update_publication_update_schema',
                                         publication_utils.test_cases)

    def setUp(self):
        """Connect to the regression database and create the schema-level
        publication to be updated."""
        super().setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        self.server_version = schema_info["server_version"]
        # Logical replication needs PostgreSQL 10+ (threshold 99999 still
        # excludes 9.6 at 90600 and admits 10 at 100000).
        if self.server_version < 99999:
            self.skipTest(
                "Logical replication is not supported "
                "for server version less than 10"
            )
        # Schema publications exist only on PG 15+.
        if self.server_version < 150000 and \
                hasattr(self, 'compatible_sversion'):
            self.skipTest("The version is not compatible for "
                          "the current test case")
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            # NOTE(review): message says "delete publication" but this test
            # updates one -- likely copy/paste.
            raise Exception(
                "Could not connect to database to delete publication.")
        self.test_data['pubschema'] = publication_utils.get_schemas(self)
        self.test_data['name'] = "test_publication_add_%s" % (
            str(uuid.uuid4())[1:8])
        self.publication_id = publication_utils.create_publication(self)

    def update_publication(self, data):
        """Issue the REST PUT updating the publication; returns the Flask
        test-client response."""
        return self.tester.put(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(
                self.db_id) +
            '/' + str(self.publication_id),
            data=json.dumps(data),
            follow_redirects=True)

    def runTest(self):
        """This function will update the publication."""
        publication_name = publication_utils. \
            verify_publication(self.server,
                               self.db_name,
                               self.test_data['name'])
        if not publication_name:
            raise Exception("Could not find the publication to update.")
        self.test_data['id'] = self.publication_id
        if self.is_positive_test:
            # Create a brand-new schema and send the refreshed schema list
            # (which now includes it) as the publication's new contents.
            self.schema_name = "schema_get_%s" % (str(uuid.uuid4())[1:8])
            connection = \
                utils.get_db_connection(self.db_name,
                                        self.server['username'],
                                        self.server['db_password'],
                                        self.server['host'],
                                        self.server['port'],
                                        self.server['sslmode'])
            self.schema_details = \
                schema_utils.create_schema(connection, self.schema_name)
            self.test_data['pubschema'] = \
                publication_utils.get_schemas(self)
        response = self.update_publication(self.test_data)
        self.assertEqual(response.status_code,
                         self.expected_data["status_code"])

    def tearDown(self):
        """Drop the publication and release the database connection."""
        publication_utils.delete_publication(self.server, self.db_name,
                                             self.test_data['name'])
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -0,0 +1,184 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2023, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
import uuid
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as publication_utils
class PublicationUpdateTableUpdateTestCase(BaseTestGenerator):
    """This class will update the publication.

    Exercises modifying a publication's existing table entries -- changing
    column lists and row filters (PostgreSQL 15 features), optionally while
    adding new tables or schemas in the same request.
    """
    # One test instance per scenario from publication_test_data.json under
    # the 'update_publication_update_table' key.
    scenarios = utils.generate_scenarios('update_publication_update_table',
                                         publication_utils.test_cases)

    def setUp(self):
        """Connect to the regression database and create the table, schema
        list and publication that runTest will update."""
        super().setUp()
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        self.server_version = schema_info["server_version"]
        self.schema_name = schema_info["schema_name"]
        # Logical replication needs PostgreSQL 10+ (threshold 99999 still
        # excludes 9.6 at 90600 and admits 10 at 100000).
        if self.server_version < 99999:
            self.skipTest(
                "Logical replication is not supported "
                "for server version less than 10"
            )
        # All 'changed' payloads below use PG 15 row filters/column lists.
        if self.server_version < 150000 and \
                hasattr(self, 'compatible_sversion'):
            self.skipTest("The version is not compatible for "
                          "the current test case")
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception(
                "Could not connect to database to delete publication.")
        self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)
        # Publication initially covers this table plus the current schemas.
        self.test_data['pubtable'] = publication_utils.get_tables(self)
        self.test_data['pubschema'] = publication_utils.get_schemas(self)
        self.test_data['name'] = "test_publication_add_%s" % (
            str(uuid.uuid4())[1:8])
        self.publication_id = publication_utils.create_publication(self)

    def update_publication(self, data):
        """Issue the REST PUT updating the publication; returns the Flask
        test-client response."""
        return self.tester.put(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(
                self.db_id) +
            '/' + str(self.publication_id),
            data=json.dumps(data),
            follow_redirects=True)

    def runTest(self):
        """This function will update the publication."""
        publication_name = publication_utils. \
            verify_publication(self.server,
                               self.db_name,
                               self.test_data['name'])
        if not publication_name:
            raise Exception("Could not find the publication to update.")
        self.test_data['id'] = self.publication_id
        if self.is_positive_test:
            if hasattr(self, 'update_table_with_columns'):
                # Change an existing table's published column list.
                tables = publication_utils.get_tables(self)
                columns = publication_utils.get_all_columns(self)
                self.test_data['pubtable'] = {'changed': [
                    {'table_name': tables[0]['table_name'],
                     "columns": [columns[0]['value']]}]}
            if hasattr(self, 'update_table_with_where'):
                # Change an existing table's row filter.
                tables = publication_utils.get_tables(self)
                self.test_data['pubtable'] = {'changed': [
                    {'table_name': tables[0]['table_name'],
                     'where': 'id=2'}]}
            if hasattr(self, 'update_table_with_where_columns'):
                # Change both the column list and the row filter.
                tables = publication_utils.get_tables(self)
                columns = publication_utils.get_all_columns(self)
                self.test_data['pubtable'] = {'changed': [
                    {'table_name': tables[0]['table_name'],
                     "columns": [columns[0]['value'],
                                 columns[1]['value']], 'where': 'id=2'}]}
            if hasattr(self, 'update_old_new_tables_with_columns'):
                # Add a brand-new table while changing the existing table's
                # column list in the same request.
                self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
                self.table_id = tables_utils.create_table(self.server,
                                                          self.db_name,
                                                          self.schema_name,
                                                          self.table_name)
                tables = publication_utils.get_tables(self)
                columns = publication_utils.get_all_columns(self)
                self.test_data['pubtable'] = {'added': [
                    {'table_name': self.schema_name + '.' + self.table_name}],
                    'changed': [
                        {'table_name': tables[0]['table_name'],
                         "columns": [columns[0]['value'],
                                     columns[1]['value']]}]
                }
            if hasattr(self, 'update_old_new_tables_with_where'):
                # Add a new table while changing the old table's row filter.
                self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
                self.table_id = tables_utils.create_table(self.server,
                                                          self.db_name,
                                                          self.schema_name,
                                                          self.table_name)
                tables = publication_utils.get_tables(self)
                self.test_data['pubtable'] = {'added': [
                    {'table_name': self.schema_name + '.' + self.table_name}],
                    'changed': [
                        {'table_name': tables[0]['table_name'], 'where': 'id=2'}]
                }
            if hasattr(self, 'update_old_new_tables_with_columns_where'):
                # Add a new table while changing the old table's column
                # list and row filter together.
                self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
                self.table_id = tables_utils.create_table(self.server,
                                                          self.db_name,
                                                          self.schema_name,
                                                          self.table_name)
                tables = publication_utils.get_tables(self)
                columns = publication_utils.get_all_columns(self)
                self.test_data['pubtable'] = {'added': [
                    {'table_name': self.schema_name + '.' + self.table_name}],
                    'changed': [
                        {'table_name': tables[0]['table_name'],
                         "columns": [columns[0]['value'],
                                     columns[1]['value']], 'where': 'id=2'}]
                }
            if hasattr(self, 'update_old_new_tables_with_where_schema'):
                # Add a new table, change the old table's row filter, and
                # also extend the publication with a freshly created schema.
                self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
                self.table_id = tables_utils.create_table(self.server,
                                                          self.db_name,
                                                          self.schema_name,
                                                          self.table_name)
                tables = publication_utils.get_tables(self)
                self.test_data['pubtable'] = {'added': [
                    {'table_name': self.schema_name + '.' + self.table_name}],
                    'changed': [
                        {'table_name': tables[0]['table_name'],
                         'where': 'id=2'}]
                }
                self.schema_name = "schema_get_%s" % (str(uuid.uuid4())[1:8])
                connection = \
                    utils.get_db_connection(self.db_name,
                                            self.server['username'],
                                            self.server['db_password'],
                                            self.server['host'],
                                            self.server['port'],
                                            self.server['sslmode'])
                self.schema_details = \
                    schema_utils.create_schema(connection, self.schema_name)
                self.test_data['pubschema'] = \
                    publication_utils.get_schemas(self)
        response = self.update_publication(self.test_data)
        self.assertEqual(response.status_code,
                         self.expected_data["status_code"])

    def tearDown(self):
        """Drop the publication and release the database connection."""
        publication_utils.delete_publication(self.server, self.db_name,
                                             self.test_data['name'])
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -59,8 +59,7 @@ class PublicationGetTestCase(BaseTestGenerator):
self.publication_name = "test_publication_delete_%s" % (
str(uuid.uuid4())[1:8])
self.publication_id = \
publication_utils.create_publication(self.server, self.db_name,
self.publication_name)
publication_utils.create_publication(self)
def get_sql(self):
return self.tester.get(

View File

@ -12,14 +12,25 @@ import sys
import os
import json
import traceback
import uuid
from regression.python_test_utils import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
with open(CURRENT_PATH + "/publication_test_data.json") as data_file:
test_cases = json.load(data_file)
def create_table_for_publication(self):
    """Create a uniquely named table for publication tests.

    Stores the generated name on ``self.table_name`` and the new table's
    OID on ``self.table_id``.
    """
    unique_suffix = str(uuid.uuid4())[1:8]
    self.table_name = "table_column_%s" % unique_suffix
    self.table_id = tables_utils.create_table(
        self.server, self.db_name, self.schema_name, self.table_name)
def get_tables(self):
    """Return a publication table payload built from the REST endpoint.

    For PostgreSQL 15+ the publication API expects a list of dicts
    (``table_name`` plus optional ``columns`` / ``where`` for column lists
    and row filters); older servers take a plain list of qualified table
    names.

    :returns: list of dicts (PG 15+) or list of str (older servers).
    """
    tables = self.tester.get(
        '/browser/publication/get_tables/' + str(
            utils.SERVER_GROUP) + '/' + str(
            self.server_id) + '/' +
        str(self.db_id) + '/',
        content_type='html/json')
    if self.server_version >= 150000:
        columns = get_all_columns(self)
        # Check the combined flag pair FIRST: testing 'with_columns' alone
        # before it would make the columns+where branch unreachable.
        if hasattr(self, 'with_columns') and hasattr(self, 'with_where'):
            return [{"table_name": tables.json['data'][0]['value'],
                     "columns": [columns[0]['value'],
                                 columns[1]['value']],
                     "where": 'id=5'
                     }]
        elif hasattr(self, 'with_columns'):
            return [{"table_name": tables.json['data'][0]['value'],
                     "columns": [columns[0]['value'],
                                 columns[1]['value']]
                     }]
        elif hasattr(self, 'with_where'):
            return [{"table_name": tables.json['data'][0]['value'],
                     "where": 'id=5'
                     }]
        else:
            return [{"table_name": i['value']} for i in tables.json['data']]
    else:
        return [i['value'] for i in tables.json['data']]
def get_all_columns(self):
    """Fetch every column of ``self.table_id`` via the publication
    endpoint and return the raw JSON ``data`` payload."""
    endpoint = ('/browser/publication/get_all_columns/'
                + str(utils.SERVER_GROUP) + '/'
                + str(self.server_id) + '/'
                + str(self.db_id) + '/' + '?tid=' + str(self.table_id))
    response = self.tester.get(endpoint, content_type='html/json')
    return response.json['data']
def get_schemas(self):
    """Return the list of schema names offered by the publication
    schema-picker endpoint."""
    endpoint = ('/browser/publication/get_schemas/'
                + str(utils.SERVER_GROUP) + '/'
                + str(self.server_id) + '/'
                + str(self.db_id) + '/')
    response = self.tester.get(endpoint, content_type='html/json')
    return [entry['value'] for entry in response.json['data']]
def create_publication_api(self):
@ -39,7 +91,7 @@ def create_publication_api(self):
content_type='html/json')
def create_publication(server, db_name, publication_name):
def create_publication(self, publication_name=None):
"""
This function creates a publication under provided table.
:param server: server details
@ -52,23 +104,47 @@ def create_publication(server, db_name, publication_name):
:rtype: int
"""
try:
connection = utils.get_db_connection(db_name,
server['username'],
server['db_password'],
server['host'],
server['port'],
server['sslmode'])
connection = utils.get_db_connection(self.db_name,
self.server['username'],
self.server['db_password'],
self.server['host'],
self.server['port'],
self.server['sslmode'])
old_isolation_level = connection.isolation_level
utils.set_isolation_level(connection, 0)
pg_cursor = connection.cursor()
query = "CREATE publication %s FOR ALL TABLES" % \
(publication_name)
pg_cursor.execute(query)
if not hasattr(self, "test_data"):
if publication_name:
query = "CREATE publication %s FOR ALL TABLES" % \
(publication_name)
else:
query = "CREATE publication %s FOR ALL TABLES" % \
(self.publication_name)
pg_cursor.execute(query)
else:
self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(
self.db_id) + '/',
data=json.dumps(self.test_data),
content_type='html/json')
utils.set_isolation_level(connection, old_isolation_level)
connection.commit()
# Get role oid of newly added publication
pg_cursor.execute("select oid from pg_catalog.pg_publication pub "
"where pub.pubname='%s'" % publication_name)
if not hasattr(self, "test_data"):
if publication_name:
pg_cursor.execute(
"select oid from pg_catalog.pg_publication pub "
"where pub.pubname='%s'" % publication_name)
else:
pg_cursor.execute(
"select oid from pg_catalog.pg_publication pub "
"where pub.pubname='%s'" % self.publication_name)
else:
pg_cursor.execute("select oid from pg_catalog.pg_publication pub "
"where pub.pubname='%s'"
% self.test_data['name'])
publication = pg_cursor.fetchone()
publication_id = ''
if publication:

View File

@ -76,7 +76,7 @@ export default function QueryThresholds({ value, onChange }) {
<InputText cid={alertCid} helpid={alerthelpid} type='numeric' value={value?.alert} onChange={onAlertChange} />
</Grid>
<Grid item lg={4} md={4} sm={4} xs={12} className={classes.contentStyle}>
<Typography>{gettext('(in minuts)')}</Typography>
<Typography>{gettext('(in minutes)')}</Typography>
</Grid>
</Grid>
</FormGroup >

View File

@ -16,7 +16,9 @@ describe('PublicationSchema', ()=>{
let mount;
let schemaObj = new PublicationSchema(
{
publicationTable: ()=>[],
allTables: ()=>[],
allSchemas:()=>[],
getColumns: ()=>[],
role: ()=>[],
},
{
@ -69,5 +71,11 @@ describe('PublicationSchema', ()=>{
expect(status).toBe(true);
});
it('pubschema disabled', ()=>{
let disabled = _.find(schemaObj.fields, (f)=>f.id=='pubschema').disabled;
let status = disabled({pubtable: [],all_table: true});
expect(status).toBe(true);
});
});