PEP8 fixes for the pgAgent and Tables nodes (and subnodes). Fixes #3148
This commit is contained in:
parent bcdb8eb275
commit 6753cd7334
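The whole diff applies one mechanical pattern: any statement longer than the 79-character PEP8 limit (pycodestyle error E501) is wrapped by breaking its arguments, string concatenations, or docstring sentences across indented continuation lines. A minimal, runnable sketch of that before/after shape follows; the stub render_template and the argument values are illustrative stand-ins, not code from this commit.

# Hypothetical stand-in for flask.render_template, only so the sketch runs.
def render_template(path, **kwargs):
    return "{0} rendered with {1}".format(path, sorted(kwargs))

template_path = 'column/sql/#100000#'

# Before: one call on a single line, longer than 79 characters (E501).
SQL = render_template("/".join([template_path, 'properties.sql']), tid=1, show_sys_objects=False)

# After: the same call wrapped inside its parentheses, the style used
# throughout this commit.
SQL = render_template(
    "/".join([template_path, 'properties.sql']),
    tid=1, show_sys_objects=False
)

print(SQL)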
@@ -79,7 +79,8 @@ class TableModule(SchemaChildModule):
scripts.append({
'name': 'pgadmin.browser.table.partition.utils',
'path': url_for('browser.index') + 'table/static/js/partition.utils',
'path': url_for('browser.index') +
'table/static/js/partition.utils',
'when': 'database', 'is_template': False
})

@@ -249,7 +250,8 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
@BaseTableView.check_precondition
def list(self, gid, sid, did, scid):
"""
This function is used to list all the table nodes within that collection.
This function is used to list all the table nodes within that
collection.
Args:
gid: Server group ID

@@ -277,7 +279,8 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
@BaseTableView.check_precondition
def node(self, gid, sid, did, scid, tid):
"""
This function is used to list all the table nodes within that collection.
This function is used to list all the table nodes within that
collection.
Args:
gid: Server group ID

@@ -300,15 +303,22 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
if len(rset['rows']) == 0:
return gone(gettext("Could not find the table."))
if 'is_partitioned' in rset['rows'][0] and \
rset['rows'][0]['is_partitioned']:
icon = "icon-partition"
else:
icon = "icon-table"
res = self.blueprint.generate_browser_node(
rset['rows'][0]['oid'],
scid,
rset['rows'][0]['name'],
icon="icon-partition" if 'is_partitioned' in rset['rows'][0] and rset['rows'][0]['is_partitioned'] else "icon-table",
tigger_count=rset['rows'][0]['triggercount'],
has_enable_triggers=rset['rows'][0]['has_enable_triggers'],
is_partitioned=rset['rows'][0]['is_partitioned'] if 'is_partitioned' in rset['rows'][0] else False
)
rset['rows'][0]['oid'],
scid,
rset['rows'][0]['name'],
icon=icon,
tigger_count=rset['rows'][0]['triggercount'],
has_enable_triggers=rset['rows'][0]['has_enable_triggers'],
is_partitioned=rset['rows'][0]['is_partitioned'] if
'is_partitioned' in rset['rows'][0] else False
)
return make_json_response(
data=res,

@@ -318,7 +328,8 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
@BaseTableView.check_precondition
def nodes(self, gid, sid, did, scid):
"""
This function is used to list all the table nodes within that collection.
This function is used to list all the table nodes within that
collection.
Args:
gid: Server group ID

@@ -338,16 +349,23 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
if 'is_partitioned' in row and row['is_partitioned']:
icon = "icon-partition"
else:
icon = "icon-table"
res.append(
self.blueprint.generate_browser_node(
row['oid'],
scid,
row['name'],
icon="icon-partition" if 'is_partitioned' in row and row['is_partitioned'] else "icon-table",
icon=icon,
tigger_count=row['triggercount'],
has_enable_triggers=row['has_enable_triggers'],
is_partitioned=row['is_partitioned'] if 'is_partitioned' in row else False,
is_partitioned=row['is_partitioned'] if
'is_partitioned' in row else False,
rows_cnt=0
))

@@ -699,19 +717,23 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
return internal_server_error(errormsg=res)
for row in rset['rows']:
# Get columns for all 'OF TYPES'.
SQL = render_template("/".join([self.table_template_path,
'get_columns_for_table.sql']),
tid=row['oid'])
SQL = render_template(
"/".join(
[self.table_template_path,
'get_columns_for_table.sql']
), tid=row['oid']
)
status, type_cols = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=type_cols)
res.append(
{'label': row['typname'], 'value': row['typname'],
'tid': row['oid'], 'oftype_columns': type_cols['rows']
}
)
res.append({
'label': row['typname'],
'value': row['typname'],
'tid': row['oid'],
'oftype_columns': type_cols['rows']
})
return make_json_response(
data=res,
status=200

@@ -790,8 +812,8 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
def get_relations(self, gid, sid, did, scid, tid=None):
"""
Returns:
This function will return list of tables available for like/relation
combobox while creating new table
This function will return list of tables available for
like/relation combobox while creating new table
"""
res = [{'label': '', 'value': ''}]
try:

@@ -805,7 +827,10 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
return internal_server_error(errormsg=res)
for row in rset['rows']:
res.append(
{'label': row['like_relation'], 'value': row['like_relation']}
{
'label': row['like_relation'],
'value': row['like_relation']
}
)
return make_json_response(
data=res,

@@ -836,11 +861,14 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
for c in final_columns:
if 'attacl' in c:
c['attacl'] = parse_priv_to_db(c['attacl'], self.column_acl)
c['attacl'] = parse_priv_to_db(
c['attacl'], self.column_acl
)
if 'cltype' in c:
# check type for '[]' in it
c['cltype'], c['hasSqrBracket'] = self._cltype_formatter(c['cltype'])
c['cltype'], c['hasSqrBracket'] = \
self._cltype_formatter(c['cltype'])
c = TableView.convert_length_precision_to_string(c)

@@ -858,11 +886,14 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
# to include in our create definition, Let's format them
for c in final_columns:
if 'attacl' in c:
c['attacl'] = parse_priv_to_db(c['attacl'], self.column_acl)
c['attacl'] = parse_priv_to_db(
c['attacl'], self.column_acl
)
if 'cltype' in c:
# check type for '[]' in it
c['cltype'], c['hasSqrBracket'] = self._cltype_formatter(c['cltype'])
c['cltype'], c['hasSqrBracket'] = \
self._cltype_formatter(c['cltype'])
c = TableView.convert_length_precision_to_string(c)

@@ -1097,7 +1128,9 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
if not status:
return internal_server_error(errormsg=res)
return super(TableView, self).truncate(gid, sid, did, scid, tid, res)
return super(TableView, self).truncate(
gid, sid, did, scid, tid, res
)
except Exception as e:
return internal_server_error(errormsg=str(e))

@@ -212,7 +212,8 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
self.qtTypeIdent = driver.qtTypeIdent
# Set the template path for the SQL scripts
self.template_path = 'column/sql/#{0}#'.format(self.manager.version)
self.template_path = 'column/sql/#{0}#'.format(
self.manager.version)
# Allowed ACL for column 'Select/Update/Insert/References'
self.acl = ['a', 'r', 'w', 'x']

@@ -236,7 +237,8 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
@check_precondition
def list(self, gid, sid, did, scid, tid):
"""
This function is used to list all the schema nodes within that collection.
This function is used to list all the schema nodes within that
collection.
Args:
gid: Server group ID

@@ -249,9 +251,10 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
JSON of available column nodes
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid,
show_sys_objects=self.blueprint.show_system_objects)
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, show_sys_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(SQL)
if not status:

@@ -264,8 +267,8 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
@check_precondition
def nodes(self, gid, sid, did, scid, tid, clid=None):
"""
This function will used to create all the child node within that collection.
Here it will create all the schema node.
This function will used to create all the child node within that
collection. Here it will create all the schema node.
Args:
gid: Server Group ID

@@ -354,7 +357,8 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
length = False
precision = False
if 'elemoid' in data:
length, precision, typeval = self.get_length_precision(data['elemoid'])
length, precision, typeval = \
self.get_length_precision(data['elemoid'])
# Set length and precision to None
data['attlen'] = None

@@ -434,7 +438,8 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
edit_types_list = list()
# We will need present type in edit mode
if data['typnspname'] == "pg_catalog" or data['typnspname'] == "public":
if data['typnspname'] == "pg_catalog" or \
data['typnspname'] == "public":
edit_types_list.append(present_type)
else:
t = self.qtTypeIdent(self.conn, data['typnspname'], present_type)

@@ -442,9 +447,10 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
data['cltype'] = t
if int(is_reference) == 0:
SQL = render_template("/".join([self.template_path,
'edit_mode_types.sql']),
type_id=type_id)
SQL = render_template(
"/".join([self.template_path, 'edit_mode_types.sql']),
type_id=type_id
)
status, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']:

@@ -476,9 +482,11 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
JSON of selected schema node
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid, clid=clid
, show_sys_objects=self.blueprint.show_system_objects)
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, clid=clid,
show_sys_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(SQL)

@@ -636,9 +644,11 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
# so that we create template for dropping column
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid, clid=clid
, show_sys_objects=self.blueprint.show_system_objects)
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, clid=clid,
show_sys_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(SQL)
if not status:

@@ -773,9 +783,11 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
data = self.convert_length_precision_to_string(data)
if clid is not None:
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid, clid=clid
, show_sys_objects=self.blueprint.show_system_objects)
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, clid=clid,
show_sys_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(SQL)
if not status:

@@ -834,8 +846,10 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
data['attacl'] = parse_priv_to_db(data['attacl'],
self.acl)
# If the request for new object which do not have did
SQL = render_template("/".join([self.template_path, 'create.sql']),
data=data, conn=self.conn, is_sql=is_sql)
SQL = render_template(
"/".join([self.template_path, 'create.sql']),
data=data, conn=self.conn, is_sql=is_sql
)
return SQL, data['name'] if 'name' in data else old_data['name']
@check_precondition

@@ -852,9 +866,11 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
clid: Column ID
"""
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid, clid=clid
, show_sys_objects=self.blueprint.show_system_objects)
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, clid=clid,
show_sys_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(SQL)
if not status:

@@ -883,14 +899,15 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
if not isinstance(SQL, (str, unicode)):
return SQL
sql_header = u"-- Column: {0}\n\n-- ".format(self.qtIdent(self.conn,
data['schema'],
data['table'],
data['name']))
sql_header = u"-- Column: {0}\n\n-- ".format(
self.qtIdent(
self.conn, data['schema'], data['table'], data['name'])
)
sql_header += render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn)
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
data=data, conn=self.conn
)
SQL = sql_header + '\n\n' + SQL
return ajax_response(response=SQL.strip('\n'))

@@ -943,7 +960,9 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
elif dep_str == 'i':
dep_type = 'internal'
dependents_result.append({'type': 'sequence', 'name': ref_name, 'field': dep_type})
dependents_result.append(
{'type': 'sequence', 'name': ref_name, 'field': dep_type}
)
return ajax_response(
response=dependents_result,

@@ -990,9 +1009,11 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
Returns the statistics for a particular object if seid is specified
"""
# Fetch column name
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid, clid=clid
, show_sys_objects=self.blueprint.show_system_objects)
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, clid=clid,
show_sys_objects=self.blueprint.show_system_objects
)
status, res = self.conn.execute_dict(SQL)
if not status:

@@ -51,15 +51,16 @@ class ColumnAddTestCase(BaseTestGenerator):
def runTest(self):
"""This function will add column under table node."""
self.column_name = "test_column_add_%s" % (str(uuid.uuid4())[1:8])
data = {"name": self.column_name,
"cltype": "\"char\"",
"attacl": [],
"is_primary_key": False,
"attnotnull": False,
"attlen": None,
"attprecision": None,
"attoptions": [],
"seclabels": []
data = {
"name": self.column_name,
"cltype": "\"char\"",
"attacl": [],
"is_primary_key": False,
"attnotnull": False,
"attlen": None,
"attprecision": None,
"attoptions": [],
"seclabels": []
}
# Add table
response = self.tester.post(

@@ -31,7 +31,8 @@ class ConstraintsModule(CollectionNodeModule):
Methods:
-------
* __init__(*args, **kwargs)
- Method is used to initialize the ConstraintsModule and it's base module.
- Method is used to initialize the ConstraintsModule and it's base
module.
* get_nodes(gid, sid, did)
- Method is used to generate the browser collection node.

@@ -40,8 +41,8 @@ class ConstraintsModule(CollectionNodeModule):
- Method is overridden from its base class to make the node as leaf node.
* script_load()
- Load the module script for constraint node, when any of the database node is
initialized.
- Load the module script for constraint node, when any of the database
node is initialized.
"""
NODE_TYPE = 'constraints'

@@ -108,6 +109,7 @@ def nodes(**kwargs):
status=200
)
@blueprint.route('/obj/<int:gid>/<int:sid>/<int:did>/<int:scid>/<int:tid>/')
def proplist(**kwargs):
"""

@@ -143,9 +145,9 @@ def module_js():
"constraints/js/constraints.js",
_=gettext,
constraints=[
(ConstraintRegistry.registry[n])['blueprint'].NODE_TYPE \
(ConstraintRegistry.registry[n])['blueprint'].NODE_TYPE
for n in ConstraintRegistry.registry
]
]
),
200, {'Content-Type': 'application/x-javascript'}
)

@@ -16,8 +16,8 @@ import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, make_response, request, jsonify
from flask_babel import gettext as _
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.browser.server_groups.servers.databases.schemas.tables.constraints.type \
import ConstraintRegistry
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.type import ConstraintRegistry
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone

@@ -75,7 +75,6 @@ class CheckConstraintModule(CollectionNodeModule):
"""
return database.DatabaseModule.NODE_TYPE
@property
def module_use_template_javascript(self):
"""

@@ -84,7 +83,6 @@ class CheckConstraintModule(CollectionNodeModule):
"""
return False
@property
def csssnippets(self):
"""

@@ -217,7 +215,8 @@ class CheckConstraintView(PGChildNodeView):
self.qtIdent = driver.qtIdent
# Set the template path for the SQL scripts
self.template_path = 'check_constraint/sql/#{0}#'.format(self.manager.version)
self.template_path = 'check_constraint/sql/#{0}#'.format(
self.manager.version)
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),

@@ -286,7 +285,8 @@ class CheckConstraintView(PGChildNodeView):
self.qtIdent = driver.qtIdent
# Set the template path for the SQL scripts
self.template_path = 'check_constraint/sql/#{0}#'.format(self.manager.version)
self.template_path = 'check_constraint/sql/#{0}#'.format(
self.manager.version)
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),

@@ -325,19 +325,20 @@ class CheckConstraintView(PGChildNodeView):
if len(rset['rows']) == 0:
return gone(_("""Could not find the check constraint."""))
if "convalidated" in rset['rows'][0] and rset['rows'][0]["convalidated"]:
if "convalidated" in rset['rows'][0] and \
rset['rows'][0]["convalidated"]:
icon = "icon-check_constraint_bad"
valid = False
else:
icon = "icon-check_constraint"
valid = True
res = self.blueprint.generate_browser_node(
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon=icon,
valid=valid
)
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon=icon,
valid=valid
)
return make_json_response(
data=res,
status=200

@@ -403,7 +404,8 @@ class CheckConstraintView(PGChildNodeView):
self.qtIdent = driver.qtIdent
# Set the template path for the SQL scripts
self.template_path = 'check_constraint/sql/#{0}#'.format(self.manager.version)
self.template_path = 'check_constraint/sql/#{0}#'.format(
self.manager.version)
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),

@@ -546,15 +548,18 @@ class CheckConstraintView(PGChildNodeView):
data['name'] = res['rows'][0]['name']
else:
sql = render_template("/".join([self.template_path, 'get_oid.sql']),
tid=tid,
name=data['name'])
sql = render_template(
"/".join([self.template_path, 'get_oid.sql']),
tid=tid,
name=data['name']
)
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
if "convalidated" in res['rows'][0] and res['rows'][0]["convalidated"]:
if "convalidated" in res['rows'][0] and \
res['rows'][0]["convalidated"]:
icon = "icon-check_constraint_bad"
valid = False
else:

@@ -666,13 +671,14 @@ class CheckConstraintView(PGChildNodeView):
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'get_name.sql']),
cid=cid)
sql = render_template(
"/".join([self.template_path, 'get_name.sql']), cid=cid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
if "convalidated" in res['rows'][0] and res['rows'][0]["convalidated"]:
if "convalidated" in res['rows'][0] and \
res['rows'][0]["convalidated"]:
icon = 'icon-check_constraint_bad'
valid = False
else:

@@ -884,13 +890,15 @@ class CheckConstraintView(PGChildNodeView):
try:
data['schema'] = self.schema
data['table'] = self.table
sql = render_template("/".join([self.template_path, 'get_name.sql']), cid=cid)
sql = render_template(
"/".join([self.template_path, 'get_name.sql']), cid=cid)
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
data['name'] = res
sql = render_template("/".join([self.template_path, 'validate.sql']), data=data)
sql = render_template(
"/".join([self.template_path, 'validate.sql']), data=data)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -15,8 +15,8 @@ from functools import wraps
import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, make_response, request, jsonify
from flask_babel import gettext as _
from pgadmin.browser.server_groups.servers.databases.schemas.tables.constraints.type \
import ConstraintRegistry, ConstraintTypeModule
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.type import ConstraintRegistry, ConstraintTypeModule
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone

@@ -32,12 +32,14 @@ class ExclusionConstraintModule(ConstraintTypeModule):
"""
class ForeignKeyConstraintModule(CollectionNodeModule)
A module class for Exclusion constraint node derived from ConstraintTypeModule.
A module class for Exclusion constraint node derived from
ConstraintTypeModule.
Methods:
-------
* __init__(*args, **kwargs)
- Method is used to initialize the ForeignKeyConstraintModule and it's base module.
- Method is used to initialize the ForeignKeyConstraintModule and
it's base module.
* get_nodes(gid, sid, did)
- Method is used to generate the browser collection node.

@@ -55,7 +57,8 @@ class ExclusionConstraintModule(ConstraintTypeModule):
def __init__(self, *args, **kwargs):
"""
Method is used to initialize the ForeignKeyConstraintModule and it's base module.
Method is used to initialize the ForeignKeyConstraintModule and
it's base module.
Args:
*args:

@@ -86,8 +89,8 @@ class ExclusionConstraintModule(ConstraintTypeModule):
@property
def script_load(self):
"""
Load the module script for exclusion_constraint, when any of the table node is
initialized.
Load the module script for exclusion_constraint, when any of the
table node is initialized.
Returns: node type of the server module.
"""

@@ -109,14 +112,16 @@ class ExclusionConstraintView(PGChildNodeView):
"""
class ExclusionConstraintView(PGChildNodeView)
A view class for Exclusion constraint node derived from PGChildNodeView. This class is
responsible for all the stuff related to view like creating, updating Exclusion constraint
node, showing properties, showing sql in sql pane.
A view class for Exclusion constraint node derived from
PGChildNodeView. This class is responsible for all the stuff related
to view like creating, updating Exclusion constraint node, showing
properties, showing sql in sql pane.
Methods:
-------
* __init__(**kwargs)
- Method is used to initialize the ForeignKeyConstraintView and it's base view.
- Method is used to initialize the ForeignKeyConstraintView and
it's base view.
* module_js()
- This property defines (if javascript) exists for this node.

@@ -135,8 +140,8 @@ class ExclusionConstraintView(PGChildNodeView):
collection as http response.
* get_list()
- This function is used to list all the language nodes within that collection
and return list of Exclusion constraint nodes.
- This function is used to list all the language nodes within that
collection and return list of Exclusion constraint nodes.
* nodes()
- This function returns child node within that collection.

@@ -152,13 +157,15 @@ class ExclusionConstraintView(PGChildNodeView):
- This function will update the data for the selected Exclusion.
* msql()
- This function is used to return modified SQL for the selected Exclusion.
- This function is used to return modified SQL for the selected
Exclusion.
* get_sql()
- This function will generate sql from model data.
* sql():
- This function will generate sql to show it in sql pane for the selected Exclusion.
- This function will generate sql to show it in sql pane for the
selected Exclusion.
* get_access_methods():
- Returns access methods for exclusion constraint.

@@ -235,7 +242,8 @@ class ExclusionConstraintView(PGChildNodeView):
)
self.conn = self.manager.connection(did=kwargs['did'])
self.template_path = 'exclusion_constraint/sql/#{0}#'.format(self.manager.version)
self.template_path = 'exclusion_constraint/sql/#{0}#'.format(
self.manager.version)
# We need parent's name eg table name and schema name
SQL = render_template("/".join([self.template_path,

@@ -284,7 +292,9 @@ class ExclusionConstraintView(PGChildNodeView):
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(_("""Could not find the exclusion constraint in the table."""))
return gone(_(
"""Could not find the exclusion constraint in the table."""
))
result = res['rows'][0]

@@ -363,12 +373,11 @@ class ExclusionConstraintView(PGChildNodeView):
Returns:
"""
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
sid
)
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
self.conn = self.manager.connection(did=did)
self.template_path = 'exclusion_constraint/sql/#{0}#'.format(self.manager.version)
self.template_path = 'exclusion_constraint/sql/#{0}#'.format(
self.manager.version)
# We need parent's name eg table name and schema name
SQL = render_template("/".join([self.template_path,

@@ -418,11 +427,11 @@ class ExclusionConstraintView(PGChildNodeView):
return gone(_("""Could not find the exclusion constraint."""))
res = self.blueprint.generate_browser_node(
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon="icon-exclusion_constraint"
)
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon="icon-exclusion_constraint"
)
return make_json_response(
data=res,
status=200

@@ -479,12 +488,11 @@ class ExclusionConstraintView(PGChildNodeView):
Returns:
"""
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
sid
)
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
self.conn = self.manager.connection(did=did)
self.template_path = 'exclusion_constraint/sql/#{0}#'.format(self.manager.version)
self.template_path = 'exclusion_constraint/sql/#{0}#'.format(
self.manager.version)
# We need parent's name eg table name and schema name
SQL = render_template("/".join([self.template_path,

@@ -598,7 +606,10 @@ class ExclusionConstraintView(PGChildNodeView):
data['name'] = res['rows'][0]['name']
else:
sql = render_template("/".join([self.template_path, 'get_oid.sql']), name=data['name'])
sql = render_template(
"/".join([self.template_path, 'get_oid.sql']),
name=data['name']
)
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()

@@ -654,7 +665,10 @@ class ExclusionConstraintView(PGChildNodeView):
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'get_oid.sql']), name=data['name'])
sql = render_template(
"/".join([self.template_path, 'get_oid.sql']),
name=data['name']
)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -694,8 +708,10 @@ class ExclusionConstraintView(PGChildNodeView):
else:
cascade = False
try:
sql = render_template("/".join([self.template_path, 'get_name.sql']),
cid=exid)
sql = render_template(
"/".join([self.template_path, 'get_name.sql']),
cid=exid
)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -707,7 +723,8 @@ class ExclusionConstraintView(PGChildNodeView):
'Error: Object not found.'
),
info=_(
'The specified exclusion constraint could not be found.\n'
'The specified exclusion constraint could not '
'be found.\n'
)
)

@@ -790,10 +807,12 @@ class ExclusionConstraintView(PGChildNodeView):
"""
if exid is not None:
sql = render_template("/".join([self.template_path, 'properties.sql']),
did=did,
tid=tid,
cid=exid)
sql = render_template(
"/".join([self.template_path, 'properties.sql']),
did=did,
tid=tid,
cid=exid
)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -15,8 +15,8 @@ from functools import wraps
import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, make_response, request, jsonify
from flask_babel import gettext as _
from pgadmin.browser.server_groups.servers.databases.schemas.tables.constraints.type \
import ConstraintRegistry, ConstraintTypeModule
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.type import ConstraintRegistry, ConstraintTypeModule
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone

@@ -32,12 +32,14 @@ class ForeignKeyConstraintModule(ConstraintTypeModule):
"""
class ForeignKeyConstraintModule(CollectionNodeModule)
A module class for Foreign key constraint node derived from ConstraintTypeModule.
A module class for Foreign key constraint node derived from
ConstraintTypeModule.
Methods:
-------
* __init__(*args, **kwargs)
- Method is used to initialize the ForeignKeyConstraintModule and it's base module.
- Method is used to initialize the ForeignKeyConstraintModule and
it's base module.
* get_nodes(gid, sid, did)
- Method is used to generate the browser collection node.

@@ -55,7 +57,8 @@ class ForeignKeyConstraintModule(ConstraintTypeModule):
def __init__(self, *args, **kwargs):
"""
Method is used to initialize the ForeignKeyConstraintModule and it's base module.
Method is used to initialize the ForeignKeyConstraintModule and
it's base module.
Args:
*args:

@@ -132,14 +135,16 @@ class ForeignKeyConstraintView(PGChildNodeView):
"""
class ForeignKeyConstraintView(PGChildNodeView)
A view class for Foreign key constraint node derived from PGChildNodeView. This class is
responsible for all the stuff related to view like creating, updating Foreign key constraint
A view class for Foreign key constraint node derived from
PGChildNodeView. This class is responsible for all the stuff related
to view like creating, updating Foreign key constraint
node, showing properties, showing sql in sql pane.
Methods:
-------
* __init__(**kwargs)
- Method is used to initialize the ForeignKeyConstraintView and it's base view.
- Method is used to initialize the ForeignKeyConstraintView and
it's base view.
* module_js()
- This property defines (if javascript) exists for this node.

@@ -155,8 +160,8 @@ class ForeignKeyConstraintView(PGChildNodeView):
collection as http response.
* get_list()
- This function is used to list all the language nodes within that collection
and return list of foreign key constraint nodes.
- This function is used to list all the language nodes within that
collection and return list of foreign key constraint nodes.
* nodes()
- This function returns child node within that collection.

@@ -172,13 +177,15 @@ class ForeignKeyConstraintView(PGChildNodeView):
- This function will update the data for the selected foreign key.
* msql()
- This function is used to return modified SQL for the selected foreign key.
- This function is used to return modified SQL for the selected
foreign key.
* get_sql()
- This function will generate sql from model data.
* sql():
- This function will generate sql to show it in sql pane for the selected foreign key.
- This function will generate sql to show it in sql pane for the
selected foreign key.
* get_indices():
- This function returns indices for current table.

@@ -244,7 +251,8 @@ class ForeignKeyConstraintView(PGChildNodeView):
kwargs['sid']
)
self.conn = self.manager.connection(did=kwargs['did'])
self.template_path = 'foreign_key/sql/#{0}#'.format(self.manager.version)
self.template_path = 'foreign_key/sql/#{0}#'.format(
self.manager.version)
# We need parent's name eg table name and schema name
SQL = render_template("/".join([self.template_path,

@@ -293,7 +301,9 @@ class ForeignKeyConstraintView(PGChildNodeView):
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(_("""Could not find the foreign key constraint in the table."""))
return gone(_(
"""Could not find the foreign key constraint in the table."""
))
result = res['rows'][0]

@@ -375,11 +385,10 @@ class ForeignKeyConstraintView(PGChildNodeView):
Returns:
"""
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
sid
)
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
self.conn = self.manager.connection(did=did)
self.template_path = 'foreign_key/sql/#{0}#'.format(self.manager.version)
self.template_path = 'foreign_key/sql/#{0}#'.format(
self.manager.version)
# We need parent's name eg table name and schema name
SQL = render_template("/".join([self.template_path,

@@ -433,12 +442,12 @@ class ForeignKeyConstraintView(PGChildNodeView):
valid = True
res = self.blueprint.generate_browser_node(
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon=icon,
valid=valid
)
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon=icon,
valid=valid
)
return make_json_response(
data=res,

@@ -491,11 +500,10 @@ class ForeignKeyConstraintView(PGChildNodeView):
"""
This function returns all foreign key nodes as a list.
"""
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
sid
)
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
self.conn = self.manager.connection(did=did)
self.template_path = 'foreign_key/sql/#{0}#'.format(self.manager.version)
self.template_path = 'foreign_key/sql/#{0}#'.format(
self.manager.version)
# We need parent's name eg table name and schema name
SQL = render_template("/".join([self.template_path,

@@ -625,7 +633,10 @@ class ForeignKeyConstraintView(PGChildNodeView):
data['name'] = res['rows'][0]['name']
else:
sql = render_template("/".join([self.template_path, 'get_oid.sql']), name=data['name'])
sql = render_template(
"/".join([self.template_path, 'get_oid.sql']),
name=data['name']
)
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()

@@ -701,9 +712,11 @@ class ForeignKeyConstraintView(PGChildNodeView):
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'get_oid.sql']),
tid=tid,
name=data['name'])
sql = render_template(
"/".join([self.template_path, 'get_oid.sql']),
tid=tid,
name=data['name']
)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -750,7 +763,8 @@ class ForeignKeyConstraintView(PGChildNodeView):
else:
cascade = False
try:
sql = render_template("/".join([self.template_path, 'get_name.sql']), fkid=fkid)
sql = render_template(
"/".join([self.template_path, 'get_name.sql']), fkid=fkid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -770,7 +784,9 @@ class ForeignKeyConstraintView(PGChildNodeView):
data['schema'] = self.schema
data['table'] = self.table
sql = render_template("/".join([self.template_path, 'delete.sql']), data=data, cascade=cascade)
sql = render_template(
"/".join([self.template_path, 'delete.sql']),
data=data, cascade=cascade)
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -843,7 +859,9 @@ class ForeignKeyConstraintView(PGChildNodeView):
"""
if fkid is not None:
sql = render_template("/".join([self.template_path, 'properties.sql']), tid=tid, cid=fkid)
sql = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, cid=fkid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -860,14 +878,14 @@ class ForeignKeyConstraintView(PGChildNodeView):
data=data, o_data=old_data)
if 'autoindex' in data and data['autoindex'] and \
('coveringindex' in data and
data['coveringindex'] != ''):
('coveringindex' in data and data['coveringindex'] != ''):
col_sql = render_template("/".join([self.template_path,
'get_constraint_cols.sql']),
tid=tid,
keys=zip(old_data['confkey'], old_data['conkey']),
confrelid=old_data['confrelid'])
col_sql = render_template(
"/".join([self.template_path, 'get_constraint_cols.sql']),
tid=tid,
keys=zip(old_data['confkey'], old_data['conkey']),
confrelid=old_data['confrelid']
)
status, res = self.conn.execute_dict(col_sql)

@@ -894,8 +912,9 @@ class ForeignKeyConstraintView(PGChildNodeView):
elif isinstance(data[arg], list) and len(data[arg]) < 1:
return _('-- definition incomplete')
if data['autoindex'] and ('coveringindex' not in data or
data['coveringindex'] == ''):
if data['autoindex'] and \
('coveringindex' not in data or
data['coveringindex'] == ''):
return _('-- definition incomplete')
SQL = render_template("/".join([self.template_path,

@@ -1047,13 +1066,15 @@ class ForeignKeyConstraintView(PGChildNodeView):
try:
data['schema'] = self.schema
data['table'] = self.table
sql = render_template("/".join([self.template_path, 'get_name.sql']), fkid=fkid)
sql = render_template(
"/".join([self.template_path, 'get_name.sql']), fkid=fkid)
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
data['name'] = res
sql = render_template("/".join([self.template_path, 'validate.sql']), data=data)
sql = render_template(
"/".join([self.template_path, 'validate.sql']), data=data)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -1126,7 +1147,6 @@ class ForeignKeyConstraintView(PGChildNodeView):
Returns:
"""
data = request.args if request.args else None
index = None
try:

@@ -15,8 +15,8 @@ from functools import wraps
import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, make_response, request, jsonify
from flask_babel import gettext as _
from pgadmin.browser.server_groups.servers.databases.schemas.tables.constraints.type \
import ConstraintRegistry, ConstraintTypeModule
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
constraints.type import ConstraintRegistry, ConstraintTypeModule
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone

@@ -32,12 +32,14 @@ class IndexConstraintModule(ConstraintTypeModule):
"""
class IndexConstraintModule(CollectionNodeModule)
A module class for Primary key constraint node derived from ConstraintTypeModule.
A module class for Primary key constraint node derived from
ConstraintTypeModule.
Methods:
-------
* __init__(*args, **kwargs)
- Method is used to initialize the PrimaryKeyConstraintModule and it's base module.
- Method is used to initialize the PrimaryKeyConstraintModule and
it's base module.
* get_nodes(gid, sid, did)
- Method is used to generate the browser collection node.

@@ -55,7 +57,8 @@ class IndexConstraintModule(ConstraintTypeModule):
def __init__(self, *args, **kwargs):
"""
Method is used to initialize the PrimaryKeyConstraintModule and it's base module.
Method is used to initialize the PrimaryKeyConstraintModule and
it's base module.
Args:
*args:

@@ -106,7 +109,8 @@ class PrimaryKeyConstraintModule(IndexConstraintModule):
"""
class PrimaryKeyConstraintModule(IndexConstraintModule)
A module class for the catalog schema node derived from IndexConstraintModule.
A module class for the catalog schema node derived from
IndexConstraintModule.
"""
NODE_TYPE = 'primary_key'

@@ -120,7 +124,8 @@ class UniqueConstraintModule(IndexConstraintModule):
"""
class UniqueConstraintModule(IndexConstraintModule)
A module class for the catalog schema node derived from IndexConstraintModule.
A module class for the catalog schema node derived from
IndexConstraintModule.
"""
NODE_TYPE = 'unique_constraint'

@@ -134,14 +139,16 @@ class IndexConstraintView(PGChildNodeView):
"""
class PrimaryKeyConstraintView(PGChildNodeView)
A view class for Primary key constraint node derived from PGChildNodeView. This class is
responsible for all the stuff related to view like creating, updating Primary key constraint
A view class for Primary key constraint node derived from
PGChildNodeView. This class is responsible for all the stuff related
to view like creating, updating Primary key constraint
node, showing properties, showing sql in sql pane.
Methods:
-------
* __init__(**kwargs)
- Method is used to initialize the PrimaryKeyConstraintView and it's base view.
- Method is used to initialize the PrimaryKeyConstraintView and
it's base view.
* module_js()
- This property defines (if javascript) exists for this node.

@@ -157,8 +164,8 @@ class IndexConstraintView(PGChildNodeView):
collection as http response.
* get_list()
- This function is used to list all the language nodes within that collection
and return list of primary key constraint nodes.
- This function is used to list all the language nodes within that
collection and return list of primary key constraint nodes.
* nodes()
- This function returns child node within that collection.

@@ -174,13 +181,15 @@ class IndexConstraintView(PGChildNodeView):
- This function will update the data for the selected primary key.
* msql()
- This function is used to return modified SQL for the selected primary key.
- This function is used to return modified SQL for the selected primary
key.
* get_sql()
- This function will generate sql from model data.
* sql():
- This function will generate sql to show it in sql pane for the selected primary key.
- This function will generate sql to show it in sql pane for the
selected primary key.
* get_indices():
- This function returns indices for current table.

@@ -373,9 +382,7 @@ class IndexConstraintView(PGChildNodeView):
Returns:
"""
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
sid
)
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
self.conn = self.manager.connection(did=did)
self.template_path = 'index_constraint/sql'

@@ -430,15 +437,15 @@ class IndexConstraintView(PGChildNodeView):
)))
res = self.blueprint.generate_browser_node(
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon="icon-%s" % self.node_type
)
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon="icon-%s" % self.node_type
)
return make_json_response(
data=res,
status=200
)
data=res,
status=200
)
@check_precondition
def nodes(self, gid, sid, did, scid, tid):

@@ -474,11 +481,12 @@ class IndexConstraintView(PGChildNodeView):
tid,
row['name'],
icon="icon-%s" % self.node_type
))
return make_json_response(
data=res,
status=200
)
)
return make_json_response(
data=res,
status=200
)
def get_nodes(self, gid, sid, did, scid, tid, cid=None):
"""

@@ -495,9 +503,7 @@ class IndexConstraintView(PGChildNodeView):
Returns:
"""
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
sid
)
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
self.conn = self.manager.connection(did=did)
self.template_path = 'index_constraint/sql'

@@ -559,20 +565,21 @@ class IndexConstraintView(PGChildNodeView):
except (ValueError, TypeError, KeyError):
data[k] = v
def is_key_list(key, data):
return isinstance(data[key], list) and len(data[param]) > 0
for arg in required_args:
if isinstance(arg, list):
for param in arg:
if (param in data and
(not isinstance(data[param], list) or
(isinstance(data[param], list) and
len(data[param]) > 0))):
if param in data and is_key_list(param, data):
break
else:
return make_json_response(
status=400,
success=0,
errormsg=_(
"Could not find at least one required parameter (%s)." % str(param)
"Could not find at least one required "
"parameter (%s)." % str(param)
)
)

@@ -626,10 +633,12 @@ class IndexConstraintView(PGChildNodeView):
data['name'] = res['rows'][0]['name']
else:
sql = render_template("/".join([self.template_path, 'get_oid.sql']),
tid=tid,
constraint_type=self.constraint_type,
name=data['name'])
sql = render_template(
"/".join([self.template_path, 'get_oid.sql']),
tid=tid,
constraint_type=self.constraint_type,
name=data['name']
)
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()

@@ -685,10 +694,12 @@ class IndexConstraintView(PGChildNodeView):
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'get_oid.sql']),
tid=tid,
constraint_type=self.constraint_type,
name=data['name'])
sql = render_template(
"/".join([self.template_path, 'get_oid.sql']),
tid=tid,
constraint_type=self.constraint_type,
name=data['name']
)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -727,10 +738,12 @@ class IndexConstraintView(PGChildNodeView):
else:
cascade = False
try:
sql = render_template("/".join([self.template_path, 'get_name.sql']),
tid=tid,
constraint_type=self.constraint_type,
cid=cid)
sql = render_template(
"/".join([self.template_path, 'get_name.sql']),
tid=tid,
constraint_type=self.constraint_type,
cid=cid
)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)

@@ -825,18 +838,23 @@ class IndexConstraintView(PGChildNodeView):
"""
if cid is not None:
sql = render_template("/".join([self.template_path, 'properties.sql']),
did=did,
tid=tid,
cid=cid,
constraint_type=self.constraint_type)
sql = render_template(
"/".join([self.template_path, 'properties.sql']),
did=did,
tid=tid,
cid=cid,
constraint_type=self.constraint_type
)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(_("""Could not find the {} in the table.""".format(
"primary key" if self.constraint_type == "p" else "unique key"
)))
return gone(
_("""Could not find the {} in the table.""".format(
"primary key" if self.constraint_type == "p"
else "unique key"
))
)
old_data = res['rows'][0]
required_args = [u'name']

@@ -852,15 +870,19 @@ class IndexConstraintView(PGChildNodeView):
[u'columns', u'index'] # Either of one should be there.
]
def is_key_str(key, data):
return isinstance(data[key], str) and data[key] != ""
def is_key_list(key, data):
return isinstance(data[key], list) and len(data[param]) > 0
for arg in required_args:
if isinstance(arg, list):
for param in arg:
if (param in data and
((isinstance(data[param], str) and
data[param] != "") or
(isinstance(data[param], list) and
len(data[param]) > 0))):
break
if param in data:
if is_key_str(param, data) \
or is_key_list(param, data):
break
else:
return _('-- definition incomplete')

@@ -970,19 +992,24 @@ class IndexConstraintView(PGChildNodeView):
if is_pgstattuple:
# Fetch index details only if extended stats available
sql = render_template("/".join([self.template_path, 'properties.sql']),
did=did,
tid=tid,
cid=cid,
constraint_type=self.constraint_type)
sql = render_template(
"/".join([self.template_path, 'properties.sql']),
did=did,
tid=tid,
cid=cid,
constraint_type=self.constraint_type
)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(_("""Could not find the {} in the table.""".format(
"primary key" if self.constraint_type == "p" else "unique key"
)))
return gone(
_("""Could not find the {} in the table.""".format(
"primary key" if self.constraint_type == "p"
else "unique key"
))
)
result = res['rows'][0]
name = result['name']

@@ -16,7 +16,8 @@ import pgadmin.browser.server_groups.servers.databases as database
from flask import render_template, request, jsonify
from flask_babel import gettext
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.browser.server_groups.servers.databases.schemas.tables.partitions import backend_supported
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
partitions import backend_supported
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone

@@ -373,7 +374,8 @@ class IndexesView(PGChildNodeView):
@check_precondition
def list(self, gid, sid, did, scid, tid):
"""
This function is used to list all the schema nodes within that collection.
This function is used to list all the schema nodes within that
collection.
Args:
gid: Server group ID

@@ -401,8 +403,8 @@ class IndexesView(PGChildNodeView):
@check_precondition
def node(self, gid, sid, did, scid, tid, idx):
"""
This function will used to create all the child node within that collection.
Here it will create all the schema node.
This function will used to create all the child node within that
collection. Here it will create all the schema node.
Args:
gid: Server Group ID

@@ -427,11 +429,11 @@ class IndexesView(PGChildNodeView):
return gone(gettext("""Could not find the index in the table."""))
res = self.blueprint.generate_browser_node(
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon="icon-index"
)
rset['rows'][0]['oid'],
tid,
rset['rows'][0]['name'],
icon="icon-index"
)
return make_json_response(
data=res,

@@ -441,8 +443,8 @@ class IndexesView(PGChildNodeView):
@check_precondition
def nodes(self, gid, sid, did, scid, tid):
"""
This function will used to create all the child node within that collection.
Here it will create all the schema node.
This function will used to create all the child node within that
collection. Here it will create all the schema node.
Args:
gid: Server Group ID

@@ -826,7 +828,9 @@ class IndexesView(PGChildNodeView):
if not status:
return internal_server_error(errormsg=res)
if len(res['rows']) == 0:
return gone(gettext("""Could not find the index in the table."""))
return gone(
gettext("""Could not find the index in the table.""")
)
old_data = dict(res['rows'][0])
||||
|
@ -27,10 +27,13 @@ from pgadmin.browser.utils import PGChildModule
|
||||
|
||||
|
||||
def backend_supported(module, manager, **kwargs):
|
||||
if 'tid' in kwargs and CollectionNodeModule.BackendSupported(module, manager, **kwargs):
|
||||
if 'tid' in kwargs and CollectionNodeModule.BackendSupported(
|
||||
module, manager, **kwargs):
|
||||
conn = manager.connection(did=kwargs['did'])
|
||||
|
||||
template_path = 'partition/sql/{0}/#{0}#{1}#'.format(manager.server_type, manager.version)
|
||||
template_path = 'partition/sql/{0}/#{0}#{1}#'.format(
|
||||
manager.server_type, manager.version
|
||||
)
|
||||
SQL = render_template("/".join(
|
||||
[template_path, 'backend_support.sql']), tid=kwargs['tid'])
|
||||
status, res = conn.execute_scalar(SQL)
|
||||
@ -41,6 +44,7 @@ def backend_supported(module, manager, **kwargs):
|
||||
|
||||
return res
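The 'partition/sql/{0}/#{0}#{1}#' pattern above encodes the server type and version into the template directory. A standalone sketch (example values only) of how the final template path is assembled:

    def partition_template_path(server_type, version):
        # Mirrors the format string used above; the '#type#version#' segment
        # carries the server type and version for the template loader.
        return 'partition/sql/{0}/#{0}#{1}#'.format(server_type, version)

    print("/".join([partition_template_path('pg', 10), 'backend_support.sql']))
    # partition/sql/pg/#pg#10#/backend_support.sql
    print("/".join([partition_template_path('gpdb', 5), 'backend_support.sql']))
    # partition/sql/gpdb/#gpdb#5#/backend_support.sql

These are the same paths the TestBackendSupport scenarios below expect render_template to be called with.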
|
||||
|
||||
|
||||
class PartitionsModule(CollectionNodeModule):
|
||||
"""
|
||||
class PartitionsModule(CollectionNodeModule)
|
||||
@ -114,7 +118,9 @@ class PartitionsModule(CollectionNodeModule):
|
||||
if first_registration:
|
||||
self.submodules = list(app.find_submodules(self.import_name))
|
||||
|
||||
super(CollectionNodeModule, self).register(app, options, first_registration)
|
||||
super(CollectionNodeModule, self).register(
|
||||
app, options, first_registration
|
||||
)
|
||||
|
||||
for module in self.submodules:
|
||||
if first_registration:
|
||||
@ -290,9 +296,9 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
JSON of available table nodes
|
||||
"""
|
||||
SQL = render_template(
|
||||
"/".join([self.partition_template_path, 'nodes.sql']),
|
||||
scid=scid, tid=tid
|
||||
)
|
||||
"/".join([self.partition_template_path, 'nodes.sql']),
|
||||
scid=scid, tid=tid
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
@ -319,7 +325,7 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
|
||||
return make_json_response(
|
||||
data=browser_node(rset['rows'][0]), status=200
|
||||
)
|
||||
)
|
||||
|
||||
res = []
|
||||
for row in rset['rows']:
|
||||
@ -462,9 +468,10 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
temp_data['schema'] = partition_schema
|
||||
temp_data['name'] = partition_name
|
||||
|
||||
SQL = render_template("/".join(
|
||||
[self.partition_template_path, 'detach.sql']),
|
||||
data=temp_data, conn=self.conn)
|
||||
SQL = render_template(
|
||||
"/".join([self.partition_template_path, 'detach.sql']),
|
||||
data=temp_data, conn=self.conn
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
@ -578,7 +585,9 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
return super(PartitionsView, self).truncate(gid, sid, did, scid, ptid, res)
|
||||
return super(PartitionsView, self).truncate(
|
||||
gid, sid, did, scid, ptid, res
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
@ -599,7 +608,7 @@ class PartitionsView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
try:
|
||||
SQL = render_template(
|
||||
"/".join([self.partition_template_path, 'properties.sql']),
|
||||
did=did, scid=scid, tid=tid,ptid=ptid,
|
||||
did=did, scid=scid, tid=tid, ptid=ptid,
|
||||
datlastsysoid=self.datlastsysoid
|
||||
)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
@ -9,7 +9,8 @@
|
||||
|
||||
import sys
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.partitions import PartitionsModule
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
partitions import PartitionsModule
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
|
||||
if sys.version_info < (3, 3):
|
||||
@ -20,7 +21,8 @@ else:
|
||||
|
||||
class TestBackendSupport(BaseTestGenerator):
|
||||
scenarios = [
|
||||
('when tid is not present in arguments, should return None and no query should be done',
|
||||
('when tid is not present in arguments, should return None and no '
|
||||
'query should be done',
|
||||
dict(
|
||||
manager=dict(
|
||||
server_type="",
|
||||
@ -35,8 +37,8 @@ class TestBackendSupport(BaseTestGenerator):
|
||||
expect_error_response=False,
|
||||
expected_number_calls_on_render_template=0
|
||||
)),
|
||||
('when tid is present in arguments and CollectionNodeModule does not support, '
|
||||
'should return None and no query should be done',
|
||||
('when tid is present in arguments and CollectionNodeModule does '
|
||||
'not support, should return None and no query should be done',
|
||||
dict(
|
||||
manager=dict(
|
||||
server_type="",
|
||||
@ -66,7 +68,9 @@ class TestBackendSupport(BaseTestGenerator):
|
||||
expected_return_value=123,
|
||||
expect_error_response=False,
|
||||
expected_number_calls_on_render_template=1,
|
||||
expect_render_template_to_be_called_with=call('partition/sql/gpdb/#gpdb#5#/backend_support.sql', tid=123)
|
||||
expect_render_template_to_be_called_with=call(
|
||||
'partition/sql/gpdb/#gpdb#5#/backend_support.sql', tid=123
|
||||
)
|
||||
)),
|
||||
('when error happens while querying the database, '
|
||||
'should return an internal server error',
|
||||
@ -83,31 +87,53 @@ class TestBackendSupport(BaseTestGenerator):
|
||||
expected_return_value=None,
|
||||
expect_error_response=True,
|
||||
expected_number_calls_on_render_template=1,
|
||||
expect_render_template_to_be_called_with=call('partition/sql/pg/#pg#10#/backend_support.sql', tid=123)
|
||||
expect_render_template_to_be_called_with=call(
|
||||
'partition/sql/pg/#pg#10#/backend_support.sql', tid=123
|
||||
)
|
||||
))
|
||||
]
|
||||
|
||||
@patch('pgadmin.browser.server_groups.servers.databases.schemas.tables.partitions.internal_server_error')
|
||||
@patch('pgadmin.browser.server_groups.servers.databases.schemas.tables.partitions.CollectionNodeModule')
|
||||
@patch('pgadmin.browser.server_groups.servers.databases.schemas.tables.partitions.render_template')
|
||||
def runTest(self, render_template_mock, CollectionNodeModule_mock, internal_server_error_mock):
|
||||
@patch(
|
||||
'pgadmin.browser.server_groups.servers.databases.schemas.tables.'
|
||||
'partitions.internal_server_error'
|
||||
)
|
||||
@patch(
|
||||
'pgadmin.browser.server_groups.servers.databases.schemas.tables.'
|
||||
'partitions.CollectionNodeModule'
|
||||
)
|
||||
@patch(
|
||||
'pgadmin.browser.server_groups.servers.databases.schemas.tables.'
|
||||
'partitions.render_template'
|
||||
)
|
||||
def runTest(
|
||||
self, render_template_mock, CollectionNodeModule_mock,
|
||||
internal_server_error_mock
|
||||
):
|
||||
module = PartitionsModule("partition")
|
||||
module.manager = Mock()
|
||||
module.manager.server_type = self.manager['server_type']
|
||||
module.manager.version = self.manager['version']
|
||||
connection_mock = Mock()
|
||||
connection_mock.execute_scalar.return_value = self.connection_execution_return_value
|
||||
connection_mock.execute_scalar.return_value = \
|
||||
self.connection_execution_return_value
|
||||
module.manager.connection.return_value = connection_mock
|
||||
CollectionNodeModule_mock.BackendSupported.return_value = self.collection_node_active
|
||||
CollectionNodeModule_mock.BackendSupported.return_value = \
|
||||
self.collection_node_active
|
||||
|
||||
result = module.BackendSupported(module.manager, **self.input_arguments)
|
||||
result = module.BackendSupported(
|
||||
module.manager, **self.input_arguments
|
||||
)
|
||||
|
||||
if self.expected_number_calls_on_render_template == 0:
|
||||
render_template_mock.assert_not_called()
|
||||
else:
|
||||
render_template_mock.assert_has_calls([self.expect_render_template_to_be_called_with])
|
||||
render_template_mock.assert_has_calls(
|
||||
[self.expect_render_template_to_be_called_with]
|
||||
)
|
||||
|
||||
if self.expect_error_response:
|
||||
internal_server_error_mock.assert_called_with(errormsg=self.connection_execution_return_value[1])
|
||||
internal_server_error_mock.assert_called_with(
|
||||
errormsg=self.connection_execution_return_value[1]
|
||||
)
|
||||
else:
|
||||
self.assertEqual(result, self.expected_return_value)
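A compact, self-contained sketch of the patch-and-assert pattern these scenarios rely on (FakeModule and fetch_backend_support are stand-ins, not pgAdmin names):

    from unittest.mock import call, patch

    class FakeModule:
        @staticmethod
        def render_template(path, **kwargs):
            return 'SELECT 1'

    def fetch_backend_support(tid):
        # Code under test: builds the versioned path and renders the template.
        return FakeModule.render_template(
            'partition/sql/pg/#pg#10#/backend_support.sql', tid=tid
        )

    with patch.object(FakeModule, 'render_template') as render_template_mock:
        render_template_mock.return_value = 'SELECT 1'
        fetch_backend_support(tid=123)
        render_template_mock.assert_has_calls(
            [call('partition/sql/pg/#pg#10#/backend_support.sql', tid=123)]
        )
        print('render_template was called with the expected path')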
|
||||
|
@ -248,10 +248,10 @@ class RuleView(PGChildNodeView):
|
||||
return gone(gettext("""Could not find the rule in the table."""))
|
||||
|
||||
res = self.blueprint.generate_browser_node(
|
||||
rset['rows'][0]['oid'],
|
||||
tid,
|
||||
rset['rows'][0]['name'],
|
||||
icon="icon-rule"
|
||||
rset['rows'][0]['oid'],
|
||||
tid,
|
||||
rset['rows'][0]['name'],
|
||||
icon="icon-rule"
|
||||
)
|
||||
|
||||
return make_json_response(
|
||||
@ -485,7 +485,9 @@ class RuleView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the rule in the table."""))
|
||||
return gone(
|
||||
gettext("""Could not find the rule in the table.""")
|
||||
)
|
||||
res_data = parse_rule_definition(res)
|
||||
|
||||
old_data = res_data
|
||||
|
@ -28,14 +28,16 @@ class TestColumnForeignKeyGetConstraintCols(BaseTestGenerator):
|
||||
]
|
||||
|
||||
def runTest(self):
|
||||
""" When there are no foreign key properties on the column, it returns an empty result """
|
||||
""" When there are no foreign key properties on the column, it returns
|
||||
an empty result """
|
||||
with test_utils.Database(self.server) as (connection, database_name):
|
||||
test_utils.create_table(self.server, database_name, "test_table")
|
||||
|
||||
cursor = connection.cursor()
|
||||
cursor.execute("SELECT pg_class.oid as table_id, "
|
||||
"pg_attribute.attnum as column_id "
|
||||
"FROM pg_class join pg_attribute on attrelid=pg_class.oid "
|
||||
"FROM pg_class join pg_attribute on "
|
||||
"attrelid=pg_class.oid "
|
||||
"where pg_class.relname='test_table'"
|
||||
" and pg_attribute.attname = 'some_column'")
|
||||
table_id, column_id = cursor.fetchone()
|
||||
@ -46,7 +48,10 @@ class TestColumnForeignKeyGetConstraintCols(BaseTestGenerator):
|
||||
self.versions_to_test = ['9.1_plus']
|
||||
|
||||
for version in self.versions_to_test:
|
||||
template_file = os.path.join(os.path.dirname(__file__), "..", version, "properties.sql")
|
||||
template_file = os.path.join(
|
||||
os.path.dirname(__file__), "..", version,
|
||||
"properties.sql"
|
||||
)
|
||||
template = file_as_template(template_file)
|
||||
|
||||
sql = template.render(
|
||||
|
@ -9,7 +9,8 @@
|
||||
|
||||
import os
|
||||
|
||||
from regression.python_test_utils.sql_template_test_base import SQLTemplateTestBase
|
||||
from regression.python_test_utils.sql_template_test_base import \
|
||||
SQLTemplateTestBase
|
||||
from regression.python_test_utils.template_helper import file_as_template
|
||||
|
||||
|
||||
@ -27,7 +28,8 @@ class TestColumnAclSql(SQLTemplateTestBase):
|
||||
def test_setup(self, connection, cursor):
|
||||
cursor.execute("SELECT pg_class.oid AS table_id, "
|
||||
"pg_attribute.attnum AS column_id "
|
||||
"FROM pg_class JOIN pg_attribute ON attrelid=pg_class.oid "
|
||||
"FROM pg_class JOIN pg_attribute ON "
|
||||
"attrelid=pg_class.oid "
|
||||
"WHERE pg_class.relname='test_table'"
|
||||
" AND pg_attribute.attname = 'some_column'")
|
||||
self.table_id, self.column_id = cursor.fetchone()
|
||||
@ -48,4 +50,7 @@ class TestColumnAclSql(SQLTemplateTestBase):
|
||||
|
||||
@staticmethod
|
||||
def get_template_file(version, filename):
|
||||
return os.path.join(os.path.dirname(__file__), "..", "templates", "column", "sql", version, filename)
|
||||
return os.path.join(
|
||||
os.path.dirname(__file__), "..", "templates", "column", "sql",
|
||||
version, filename
|
||||
)
|
||||
|
@ -9,7 +9,8 @@
|
||||
|
||||
import os
|
||||
|
||||
from regression.python_test_utils.sql_template_test_base import SQLTemplateTestBase
|
||||
from regression.python_test_utils.sql_template_test_base import \
|
||||
SQLTemplateTestBase
|
||||
from regression.python_test_utils.template_helper import file_as_template
|
||||
|
||||
|
||||
@ -49,4 +50,7 @@ class TestColumnPropertiesSql(SQLTemplateTestBase):
|
||||
|
||||
@staticmethod
|
||||
def get_template_file(version, filename):
|
||||
return os.path.join(os.path.dirname(__file__), "..", "templates", "column", "sql", version, filename)
|
||||
return os.path.join(
|
||||
os.path.dirname(__file__), "..", "templates", "column", "sql",
|
||||
version, filename
|
||||
)
|
||||
|
@ -109,9 +109,10 @@ class TableUpdateTestCase(BaseTestGenerator):
|
||||
'partitioned',
|
||||
self.partition_type)
|
||||
else:
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.table_id = tables_utils.create_table(
|
||||
self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch added table under schema node."""
|
||||
|
@ -9,7 +9,8 @@
|
||||
|
||||
import os
|
||||
from regression.python_test_utils.template_helper import file_as_template
|
||||
from regression.python_test_utils.sql_template_test_base import SQLTemplateTestBase
|
||||
from regression.python_test_utils.sql_template_test_base import \
|
||||
SQLTemplateTestBase
|
||||
|
||||
|
||||
class TestTablesAclSql(SQLTemplateTestBase):
|
||||
@ -37,10 +38,14 @@ class TestTablesAclSql(SQLTemplateTestBase):
|
||||
return sql
|
||||
|
||||
def assertions(self, fetch_result, descriptions):
|
||||
public_acls = list(filter(lambda acl: acl[1] == 'PUBLIC', fetch_result))
|
||||
public_acls = list(
|
||||
filter(lambda acl: acl[1] == 'PUBLIC', fetch_result)
|
||||
)
|
||||
self.assertEqual(len(public_acls), 1)
|
||||
|
||||
new_acl_map = dict(zip(map(lambda column: column.name, descriptions), public_acls[0]))
|
||||
new_acl_map = dict(
|
||||
zip(map(lambda column: column.name, descriptions), public_acls[0])
|
||||
)
|
||||
|
||||
self.assertEqual('PUBLIC', new_acl_map['grantee'])
|
||||
self.assertEqual(self.server['username'], new_acl_map['grantor'])
|
||||
@ -51,4 +56,7 @@ class TestTablesAclSql(SQLTemplateTestBase):
|
||||
|
||||
@staticmethod
|
||||
def get_template_file(version, filename):
|
||||
return os.path.join(os.path.dirname(__file__), "..", "templates", "table", "sql", version, filename)
|
||||
return os.path.join(
|
||||
os.path.dirname(__file__), "..", "templates", "table", "sql",
|
||||
version, filename
|
||||
)
|
||||
|
@ -10,7 +10,8 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from regression.python_test_utils.sql_template_test_base import SQLTemplateTestBase
|
||||
from regression.python_test_utils.sql_template_test_base import \
|
||||
SQLTemplateTestBase
|
||||
from regression.python_test_utils.template_helper import file_as_template
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
@ -52,4 +53,7 @@ class TestTablesNodeSql(SQLTemplateTestBase):
|
||||
|
||||
@staticmethod
|
||||
def get_template_file(version, filename):
|
||||
return os.path.join(os.path.dirname(__file__), "..", "templates", "table", "sql", version, filename)
|
||||
return os.path.join(
|
||||
os.path.dirname(__file__), "..", "templates", "table", "sql",
|
||||
version, filename
|
||||
)
|
||||
|
@ -11,7 +11,8 @@ import os
|
||||
import sys
|
||||
|
||||
from regression.python_test_utils.template_helper import file_as_template
|
||||
from regression.python_test_utils.sql_template_test_base import SQLTemplateTestBase
|
||||
from regression.python_test_utils.sql_template_test_base import \
|
||||
SQLTemplateTestBase
|
||||
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
@ -66,7 +67,9 @@ class TestTablesPropertiesSql(SQLTemplateTestBase):
|
||||
cursor.execute("SELECT oid FROM pg_class where relname='test_table'")
|
||||
self.table_id = cursor.fetchone()[0]
|
||||
|
||||
|
||||
@staticmethod
|
||||
def get_template_file(version, filename):
|
||||
return os.path.join(os.path.dirname(__file__), "..", "templates", "table", "sql", version, filename)
|
||||
return os.path.join(
|
||||
os.path.dirname(__file__), "..", "templates", "table", "sql",
|
||||
version, filename
|
||||
)
|
||||
|
@ -27,7 +27,9 @@ class TestTemplateCreate(BaseTestGenerator):
|
||||
'when no primary key is present, '
|
||||
'it returns "DISTRIBUTED RANDOMLY"',
|
||||
dict(
|
||||
template_path=os.path.join('table', 'sql', 'gpdb_5.0_plus', 'create.sql'),
|
||||
template_path=os.path.join(
|
||||
'table', 'sql', 'gpdb_5.0_plus', 'create.sql'
|
||||
),
|
||||
input_parameters=dict(
|
||||
data=dict()
|
||||
),
|
||||
@ -41,7 +43,8 @@ class TestTemplateCreate(BaseTestGenerator):
|
||||
'when primary key is present, '
|
||||
'it returns "DISTRIBUTED BY (attr_primary_key)"',
|
||||
dict(
|
||||
template_path=os.path.join('table', 'sql', 'gpdb_5.0_plus', 'create.sql'),
|
||||
template_path=os.path.join(
|
||||
'table', 'sql', 'gpdb_5.0_plus', 'create.sql'),
|
||||
input_parameters=dict(
|
||||
data=dict(
|
||||
primary_key=[
|
||||
@ -55,7 +58,9 @@ class TestTemplateCreate(BaseTestGenerator):
|
||||
]
|
||||
)
|
||||
),
|
||||
expected_in_return_value='DISTRIBUTED BY (attr_primary_key_column_1, attr_primary_key_column_2)',
|
||||
expected_in_return_value='DISTRIBUTED BY '
|
||||
'(attr_primary_key_column_1, '
|
||||
'attr_primary_key_column_2)',
|
||||
expected_not_in_return_value='DISTRIBUTED RANDOMLY'
|
||||
)
|
||||
),
|
||||
@ -64,7 +69,8 @@ class TestTemplateCreate(BaseTestGenerator):
|
||||
'when distribution is present, '
|
||||
'it returns "DISTRIBUTED BY (attr1, attr2, attr4)"',
|
||||
dict(
|
||||
template_path=os.path.join('table', 'sql', 'gpdb_5.0_plus', 'create.sql'),
|
||||
template_path=os.path.join(
|
||||
'table', 'sql', 'gpdb_5.0_plus', 'create.sql'),
|
||||
input_parameters=dict(
|
||||
data=dict(
|
||||
distribution=[1, 2, 4],
|
||||
@ -77,7 +83,8 @@ class TestTemplateCreate(BaseTestGenerator):
|
||||
]
|
||||
)
|
||||
),
|
||||
expected_in_return_value='DISTRIBUTED BY (attr1, attr2, attr4)',
|
||||
expected_in_return_value='DISTRIBUTED BY '
|
||||
'(attr1, attr2, attr4)',
|
||||
expected_not_in_return_value='DISTRIBUTED RANDOMLY'
|
||||
)
|
||||
),
|
||||
@ -88,14 +95,17 @@ class TestTemplateCreate(BaseTestGenerator):
|
||||
|
||||
def runTest(self):
|
||||
with FakeApp().app_context():
|
||||
result = render_template(self.template_path, **self.input_parameters)
|
||||
result_beautified = re.sub(' +', ' ', str(result).replace("\n", " ").strip())
|
||||
result = render_template(
|
||||
self.template_path, **self.input_parameters)
|
||||
result_beautified = re.sub(
|
||||
' +', ' ', str(result).replace("\n", " ").strip())
|
||||
if hasattr(self, 'expected_return_value'):
|
||||
self.assertEqual(result_beautified, self.expected_return_value)
|
||||
if hasattr(self, 'expected_in_return_value'):
|
||||
self.assertIn(self.expected_in_return_value, result_beautified)
|
||||
if hasattr(self, 'expected_not_in_return_value'):
|
||||
self.assertNotIn(self.expected_not_in_return_value, result_beautified)
|
||||
self.assertNotIn(
|
||||
self.expected_not_in_return_value, result_beautified)
|
||||
|
||||
|
||||
class FakeApp(Flask):
|
||||
@ -107,16 +117,20 @@ class FakeApp(Flask):
|
||||
self.jinja_env.filters['qtTypeIdent'] = driver.qtTypeIdent
|
||||
self.jinja_loader = ChoiceLoader([
|
||||
FileSystemLoader(
|
||||
os.path.dirname(os.path.realpath(__file__)) + '/../templates/'
|
||||
os.path.dirname(
|
||||
os.path.realpath(__file__)) + '/../templates/'
|
||||
),
|
||||
FileSystemLoader(
|
||||
os.path.dirname(os.path.realpath(__file__)) + '/../../templates/'
|
||||
os.path.dirname(
|
||||
os.path.realpath(__file__)) + '/../../templates/'
|
||||
),
|
||||
FileSystemLoader(
|
||||
os.path.dirname(os.path.realpath(__file__)) + '/../../types/templates/'
|
||||
os.path.dirname(
|
||||
os.path.realpath(__file__)) + '/../../types/templates/'
|
||||
),
|
||||
FileSystemLoader(
|
||||
os.path.dirname(os.path.realpath(__file__)) + '/../../../../templates/'
|
||||
os.path.dirname(
|
||||
os.path.realpath(__file__)) + '/../../../../templates/'
|
||||
),
|
||||
]
|
||||
)
|
||||
|
@ -10,7 +10,8 @@
|
||||
import os
|
||||
|
||||
import jinja2
|
||||
from regression.python_test_utils.sql_template_test_base import SQLTemplateTestBase
|
||||
from regression.python_test_utils.sql_template_test_base import \
|
||||
SQLTemplateTestBase
|
||||
from regression.python_test_utils.template_helper import file_as_template
|
||||
|
||||
|
||||
@ -27,7 +28,8 @@ class TestTriggerGetOidSql(SQLTemplateTestBase):
|
||||
def test_setup(self, connection, cursor):
|
||||
cursor.execute("SELECT pg_class.oid AS table_id, "
|
||||
"pg_attribute.attnum AS column_id "
|
||||
"FROM pg_class JOIN pg_attribute ON attrelid=pg_class.oid "
|
||||
"FROM pg_class JOIN pg_attribute ON "
|
||||
"attrelid=pg_class.oid "
|
||||
"WHERE pg_class.relname='test_table'"
|
||||
" AND pg_attribute.attname = 'some_column'")
|
||||
self.table_id, self.column_id = cursor.fetchone()
|
||||
@ -48,5 +50,7 @@ class TestTriggerGetOidSql(SQLTemplateTestBase):
|
||||
|
||||
@staticmethod
|
||||
def get_template_file(version, filename):
|
||||
return os.path.join(os.path.dirname(__file__), "..", "templates", "trigger", "sql", version, filename)
|
||||
|
||||
return os.path.join(
|
||||
os.path.dirname(__file__), "..", "templates", "trigger", "sql",
|
||||
version, filename
|
||||
)
|
||||
|
@ -9,7 +9,8 @@
|
||||
|
||||
import os
|
||||
|
||||
from regression.python_test_utils.sql_template_test_base import SQLTemplateTestBase
|
||||
from regression.python_test_utils.sql_template_test_base import \
|
||||
SQLTemplateTestBase
|
||||
from regression.python_test_utils.template_helper import file_as_template
|
||||
|
||||
|
||||
@ -40,4 +41,7 @@ class TestTriggerNodesSql(SQLTemplateTestBase):
|
||||
|
||||
@staticmethod
|
||||
def get_template_file(version, filename):
|
||||
return os.path.join(os.path.dirname(__file__), "..", "templates", "trigger", "sql", version, filename)
|
||||
return os.path.join(
|
||||
os.path.dirname(__file__), "..", "templates", "trigger", "sql",
|
||||
version, filename
|
||||
)
|
||||
|
@ -89,8 +89,10 @@ def verify_table(server, db_name, table_id):
|
||||
raise
|
||||
|
||||
|
||||
def create_table_for_partition(server, db_name, schema_name, table_name,
|
||||
table_type, partition_type, partition_name=None):
|
||||
def create_table_for_partition(
|
||||
server, db_name, schema_name, table_name,
|
||||
table_type, partition_type, partition_name=None
|
||||
):
|
||||
"""
|
||||
This function creates partitioned/partition/regular table
|
||||
under provided schema.
|
||||
@ -216,9 +218,10 @@ def set_partition_data(server, db_name, schema_name, table_name,
|
||||
}
|
||||
)
|
||||
elif partition_type == 'range' and mode == 'attach':
|
||||
partition_id = create_table_for_partition(server, db_name, schema_name,
|
||||
'attach_sale_2010', 'regular',
|
||||
partition_type)
|
||||
partition_id = create_table_for_partition(
|
||||
server, db_name, schema_name, 'attach_sale_2010', 'regular',
|
||||
partition_type
|
||||
)
|
||||
data['partitions'].update(
|
||||
{'added': [{'values_from': "'2010-01-01'",
|
||||
'values_to': "'2010-12-31'",
|
||||
@ -228,9 +231,10 @@ def set_partition_data(server, db_name, schema_name, table_name,
|
||||
}
|
||||
)
|
||||
elif partition_type == 'list' and mode == 'attach':
|
||||
partition_id = create_table_for_partition(server, db_name, schema_name,
|
||||
'attach_sale_2011', 'regular',
|
||||
partition_type)
|
||||
partition_id = create_table_for_partition(
|
||||
server, db_name, schema_name, 'attach_sale_2011', 'regular',
|
||||
partition_type
|
||||
)
|
||||
data['partitions'].update(
|
||||
{'added': [{'values_in': "'2011-01-01'",
|
||||
'is_attach': True,
|
||||
|
@ -269,14 +269,16 @@ class TriggerView(PGChildNodeView):
|
||||
kwargs['sid']
|
||||
)
|
||||
self.conn = self.manager.connection(did=kwargs['did'])
|
||||
# We need datlastsysoid to check if current trigger is system trigger
|
||||
# We need datlastsysoid to check if current trigger is system
|
||||
# trigger
|
||||
self.datlastsysoid = self.manager.db_info[
|
||||
kwargs['did']
|
||||
]['datlastsysoid'] if self.manager.db_info is not None and \
|
||||
kwargs['did'] in self.manager.db_info else 0
|
||||
|
||||
# we will set template path for sql scripts
|
||||
self.template_path = 'trigger/sql/#{0}#'.format(self.manager.version)
|
||||
self.template_path = 'trigger/sql/#{0}#'.format(
|
||||
self.manager.version)
|
||||
# Store server type
|
||||
self.server_type = self.manager.server_type
|
||||
# We need parent's name eg table name and schema name
|
||||
@ -327,10 +329,10 @@ class TriggerView(PGChildNodeView):
|
||||
'value': 'Inline EDB-SPL'
|
||||
})
|
||||
try:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_triggerfunctions.sql']),
|
||||
show_system_objects=self.blueprint.show_system_objects
|
||||
)
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'get_triggerfunctions.sql']),
|
||||
show_system_objects=self.blueprint.show_system_objects
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@ -351,7 +353,8 @@ class TriggerView(PGChildNodeView):
|
||||
@check_precondition
|
||||
def list(self, gid, sid, did, scid, tid):
|
||||
"""
|
||||
This function is used to list all the trigger nodes within that collection.
|
||||
This function is used to list all the trigger nodes within that
|
||||
collection.
|
||||
|
||||
Args:
|
||||
gid: Server group ID
|
||||
@ -378,7 +381,8 @@ class TriggerView(PGChildNodeView):
|
||||
@check_precondition
|
||||
def node(self, gid, sid, did, scid, tid, trid):
|
||||
"""
|
||||
This function will used to create the child node within that collection.
|
||||
This function will used to create the child node within that
|
||||
collection.
|
||||
Here it will create specific the trigger node.
|
||||
|
||||
Args:
|
||||
@ -402,15 +406,17 @@ class TriggerView(PGChildNodeView):
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
if len(rset['rows']) == 0:
|
||||
return gone(gettext("""Could not find the trigger in the table."""))
|
||||
return gone(
|
||||
gettext("""Could not find the trigger in the table.""")
|
||||
)
|
||||
|
||||
res = self.blueprint.generate_browser_node(
|
||||
rset['rows'][0]['oid'],
|
||||
tid,
|
||||
rset['rows'][0]['name'],
|
||||
icon="icon-trigger" if rset['rows'][0]['is_enable_trigger']
|
||||
else "icon-trigger-bad"
|
||||
)
|
||||
rset['rows'][0]['oid'],
|
||||
tid,
|
||||
rset['rows'][0]['name'],
|
||||
icon="icon-trigger" if
|
||||
rset['rows'][0]['is_enable_trigger'] else "icon-trigger-bad"
|
||||
)
|
||||
|
||||
return make_json_response(
|
||||
data=res,
|
||||
@ -420,7 +426,8 @@ class TriggerView(PGChildNodeView):
|
||||
@check_precondition
|
||||
def nodes(self, gid, sid, did, scid, tid):
|
||||
"""
|
||||
This function will used to create all the child node within that collection.
|
||||
This function will used to create all the child node within that
|
||||
collection.
|
||||
Here it will create all the trigger node.
|
||||
|
||||
Args:
|
||||
@ -542,7 +549,6 @@ class TriggerView(PGChildNodeView):
|
||||
formatted_args = ["'{0}'".format(arg) for arg in args]
|
||||
return ', '.join(formatted_args)
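For reference, the argument-formatting helper shown in this hunk boils down to the following standalone snippet (the sample arguments are invented):

    def format_args(args):
        # Quote each trigger argument and join them into a single list string.
        formatted_args = ["'{0}'".format(arg) for arg in args]
        return ', '.join(formatted_args)

    print(format_args(['10', 'public', 'sales']))
    # '10', 'public', 'sales'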
|
||||
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, tid, trid):
|
||||
"""
|
||||
@ -572,7 +578,8 @@ class TriggerView(PGChildNodeView):
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the trigger in the table."""))
|
||||
return gone(
|
||||
gettext("""Could not find the trigger in the table."""))
|
||||
|
||||
# Making copy of output for future use
|
||||
data = dict(res['rows'][0])
|
||||
@ -626,8 +633,10 @@ class TriggerView(PGChildNodeView):
|
||||
return make_json_response(
|
||||
status=410,
|
||||
success=0,
|
||||
errormsg=gettext("Could not find the required parameter (%s)." % \
|
||||
required_args[arg])
|
||||
errormsg=gettext(
|
||||
"Could not find the required parameter (%s)." %
|
||||
required_args[arg]
|
||||
)
|
||||
)
|
||||
|
||||
# Adding parent into data dict, will be using it while creating sql
|
||||
@ -757,8 +766,10 @@ class TriggerView(PGChildNodeView):
|
||||
# We need oid to add object in browser tree and if user
|
||||
# update the trigger then new OID is getting generated
|
||||
# so we need to return new OID of trigger.
|
||||
SQL = render_template("/".join([self.template_path, 'get_oid.sql']),
|
||||
tid=tid, data=data)
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'get_oid.sql']),
|
||||
tid=tid, data=data
|
||||
)
|
||||
status, new_trid = self.conn.execute_scalar(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=new_trid)
|
||||
@ -836,23 +847,26 @@ class TriggerView(PGChildNodeView):
|
||||
"""
|
||||
This function will return trigger function with schema name
|
||||
"""
|
||||
# If language is 'edbspl' then trigger function should be 'Inline EDB-SPL'
|
||||
# else we will find the trigger function with schema name.
|
||||
# If language is 'edbspl' then trigger function should be
|
||||
# 'Inline EDB-SPL' else we will find the trigger function
|
||||
# with schema name.
|
||||
if data['lanname'] == 'edbspl':
|
||||
data['tfunction'] = 'Inline EDB-SPL'
|
||||
else:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_triggerfunctions.sql']),
|
||||
tgfoid=data['tgfoid'],
|
||||
show_system_objects=self.blueprint.show_system_objects)
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'get_triggerfunctions.sql']),
|
||||
tgfoid=data['tgfoid'],
|
||||
show_system_objects=self.blueprint.show_system_objects
|
||||
)
|
||||
|
||||
status, result = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
# Update the trigger function which we have fetched with schema name
|
||||
# Update the trigger function which we have fetched with schema
|
||||
# name
|
||||
if 'rows' in result and len(result['rows']) > 0 and \
|
||||
'tfunctions' in result['rows'][0]:
|
||||
'tfunctions' in result['rows'][0]:
|
||||
data['tfunction'] = result['rows'][0]['tfunctions']
|
||||
return data
|
||||
|
||||
@ -888,8 +902,10 @@ class TriggerView(PGChildNodeView):
|
||||
old_data = self.get_trigger_function_schema(old_data)
|
||||
|
||||
if len(old_data['custom_tgargs']) > 1:
|
||||
# We know that trigger has more than 1 argument, let's join them
|
||||
old_data['tgargs'] = self._format_args(old_data['custom_tgargs'])
|
||||
# We know that trigger has more than 1 argument, let's join
|
||||
# them
|
||||
old_data['tgargs'] = \
|
||||
self._format_args(old_data['custom_tgargs'])
|
||||
|
||||
if len(old_data['tgattr']) > 1:
|
||||
columns = ', '.join(old_data['tgattr'].split(' '))
|
||||
@ -939,7 +955,8 @@ class TriggerView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the trigger in the table."""))
|
||||
return gone(
|
||||
gettext("""Could not find the trigger in the table."""))
|
||||
|
||||
data = dict(res['rows'][0])
|
||||
# Adding parent into data dict, will be using it while creating sql
|
||||
|
@ -52,7 +52,7 @@ class TriggersAddTestCase(BaseTestGenerator):
|
||||
self.func_name = "trigger_func_add_%s" % str(uuid.uuid4())[1:8]
|
||||
self.function_info = \
|
||||
trigger_funcs_utils.create_trigger_function_with_trigger(
|
||||
self.server, self.db_name, self.schema_name, self.func_name)
|
||||
self.server, self.db_name, self.schema_name, self.func_name)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will trigger under table node."""
|
||||
|
@ -52,7 +52,7 @@ class TriggersDeleteTestCase(BaseTestGenerator):
|
||||
self.func_name = "trigger_func_delete_%s" % str(uuid.uuid4())[1:8]
|
||||
self.function_info = \
|
||||
trigger_funcs_utils.create_trigger_function_with_trigger(
|
||||
self.server, self.db_name, self.schema_name, self.func_name)
|
||||
self.server, self.db_name, self.schema_name, self.func_name)
|
||||
self.trigger_name = "test_trigger_delete_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.trigger_id = triggers_utils.create_trigger(self.server,
|
||||
self.db_name,
|
||||
|
@ -52,7 +52,7 @@ class TriggersGetTestCase(BaseTestGenerator):
|
||||
self.func_name = "trigger_func_get_%s" % str(uuid.uuid4())[1:8]
|
||||
self.function_info = \
|
||||
trigger_funcs_utils.create_trigger_function_with_trigger(
|
||||
self.server, self.db_name, self.schema_name, self.func_name)
|
||||
self.server, self.db_name, self.schema_name, self.func_name)
|
||||
self.trigger_name = "test_trigger_get_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.trigger_id = triggers_utils.create_trigger(self.server,
|
||||
self.db_name,
|
||||
|
@ -54,7 +54,7 @@ class TriggersUpdateTestCase(BaseTestGenerator):
|
||||
self.func_name = "trigger_func_add_%s" % str(uuid.uuid4())[1:8]
|
||||
self.function_info = \
|
||||
trigger_funcs_utils.create_trigger_function_with_trigger(
|
||||
self.server, self.db_name, self.schema_name, self.func_name)
|
||||
self.server, self.db_name, self.schema_name, self.func_name)
|
||||
self.trigger_name = "test_trigger_delete_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.trigger_id = triggers_utils.create_trigger(self.server,
|
||||
self.db_name,
|
||||
|
@ -111,12 +111,12 @@ class BaseTableView(PGChildNodeView):
|
||||
if server_type == 'gpdb' else
|
||||
'#{0}#'.format(ver)
|
||||
)
|
||||
self.data_type_template_path='datatype/sql/'+ (
|
||||
'#{0}#{1}#'.format(server_type, ver)
|
||||
if server_type == 'gpdb' else
|
||||
'#{0}#'.format(ver)
|
||||
self.data_type_template_path = 'datatype/sql/' + (
|
||||
'#{0}#{1}#'.format(server_type, ver) if
|
||||
server_type == 'gpdb' else '#{0}#'.format(ver)
|
||||
)
|
||||
self.partition_template_path = 'partition/sql/{0}/#{0}#{1}#'.format(server_type, ver)
|
||||
self.partition_template_path = \
|
||||
'partition/sql/{0}/#{0}#{1}#'.format(server_type, ver)
|
||||
|
||||
# Template for Column ,check constraint and exclusion
|
||||
# constraint node
|
||||
@ -130,7 +130,8 @@ class BaseTableView(PGChildNodeView):
|
||||
self.index_constraint_template_path = 'index_constraint/sql'
|
||||
|
||||
# Template for foreign key constraint node
|
||||
self.foreign_key_template_path = 'foreign_key/sql/#{0}#'.format(ver)
|
||||
self.foreign_key_template_path = \
|
||||
'foreign_key/sql/#{0}#'.format(ver)
|
||||
|
||||
# Template for index node
|
||||
self.index_template_path = 'index/sql/#{0}#'.format(ver)
|
||||
@ -163,7 +164,7 @@ class BaseTableView(PGChildNodeView):
|
||||
else:
|
||||
SQL = render_template(
|
||||
"/".join(
|
||||
[self.trigger_template_path,'get_triggerfunctions.sql']
|
||||
[self.trigger_template_path, 'get_triggerfunctions.sql']
|
||||
),
|
||||
tgfoid=data['tgfoid'],
|
||||
show_system_objects=self.blueprint.show_system_objects
|
||||
@ -176,7 +177,7 @@ class BaseTableView(PGChildNodeView):
|
||||
# Update the trigger function which we have fetched with
|
||||
# schema name
|
||||
if 'rows' in result and len(result['rows']) > 0 and \
|
||||
'tfunctions' in result['rows'][0]:
|
||||
'tfunctions' in result['rows'][0]:
|
||||
data['tfunction'] = result['rows'][0]['tfunctions']
|
||||
return data
|
||||
|
||||
@ -193,7 +194,6 @@ class BaseTableView(PGChildNodeView):
|
||||
formatted_args = ["'{0}'".format(arg) for arg in args]
|
||||
return ', '.join(formatted_args)
|
||||
|
||||
|
||||
def _columns_formatter(self, tid, data):
|
||||
"""
|
||||
Args:
|
||||
@ -279,7 +279,6 @@ class BaseTableView(PGChildNodeView):
|
||||
column['attlen'] = matchObj.group(1)
|
||||
column['attprecision'] = None
|
||||
|
||||
|
||||
SQL = render_template("/".join([self.column_template_path,
|
||||
'is_referenced.sql']),
|
||||
tid=tid, clid=column['attnum'])
|
||||
@ -310,7 +309,9 @@ class BaseTableView(PGChildNodeView):
|
||||
edit_types_list.append(present_type)
|
||||
|
||||
column['edit_types'] = edit_types_list
|
||||
column['cltype'] = DataTypeReader.parse_type_name(column['cltype'])
|
||||
column['cltype'] = DataTypeReader.parse_type_name(
|
||||
column['cltype']
|
||||
)
|
||||
|
||||
if 'indkey' in column:
|
||||
# Current column
|
||||
@ -346,10 +347,14 @@ class BaseTableView(PGChildNodeView):
|
||||
for ctype in index_constraints.keys():
|
||||
data[index_constraints[ctype]] = []
|
||||
|
||||
sql = render_template("/".join([self.index_constraint_template_path,
|
||||
'properties.sql']),
|
||||
did=did, tid=tid,
|
||||
constraint_type=ctype)
|
||||
sql = render_template(
|
||||
"/".join(
|
||||
[self.index_constraint_template_path, 'properties.sql']
|
||||
),
|
||||
did=did,
|
||||
tid=tid,
|
||||
constraint_type=ctype
|
||||
)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
@ -488,9 +493,12 @@ class BaseTableView(PGChildNodeView):
|
||||
"""
|
||||
|
||||
# We will fetch all the index constraints for the table
|
||||
sql = render_template("/".join([self.exclusion_constraint_template_path,
|
||||
'properties.sql']),
|
||||
did=did, tid=tid)
|
||||
sql = render_template(
|
||||
"/".join(
|
||||
[self.exclusion_constraint_template_path, 'properties.sql']
|
||||
),
|
||||
did=did, tid=tid
|
||||
)
|
||||
|
||||
status, result = self.conn.execute_dict(sql)
|
||||
|
||||
@ -844,7 +852,9 @@ class BaseTableView(PGChildNodeView):
|
||||
if 'columns' in data:
|
||||
for c in data['columns']:
|
||||
if 'attacl' in c:
|
||||
c['attacl'] = parse_priv_to_db(c['attacl'], self.column_acl)
|
||||
c['attacl'] = parse_priv_to_db(
|
||||
c['attacl'], self.column_acl
|
||||
)
|
||||
|
||||
# check type for '[]' in it
|
||||
if 'cltype' in c:
|
||||
@ -999,7 +1009,8 @@ class BaseTableView(PGChildNodeView):
|
||||
data = self.get_trigger_function_schema(data)
|
||||
|
||||
if len(data['custom_tgargs']) > 1:
|
||||
# We know that trigger has more than 1 argument, let's join them
|
||||
# We know that trigger has more than 1 argument, let's
|
||||
# join them
|
||||
data['tgargs'] = self._format_args(data['custom_tgargs'])
|
||||
|
||||
if len(data['tgattr']) >= 1:
|
||||
@ -1103,7 +1114,7 @@ class BaseTableView(PGChildNodeView):
|
||||
part_data['relispartition'] = True
|
||||
part_data['name'] = row['name']
|
||||
part_data['partition_value'] = row['partition_value']
|
||||
part_data['is_partitioned'] = row ['is_partitioned']
|
||||
part_data['is_partitioned'] = row['is_partitioned']
|
||||
part_data['partition_scheme'] = row['partition_scheme']
|
||||
|
||||
partition_sql += render_template("/".join(
|
||||
@ -1112,7 +1123,9 @@ class BaseTableView(PGChildNodeView):
|
||||
|
||||
# Add into main sql
|
||||
partition_sql = re.sub('\n{2,}', '\n\n', partition_sql)
|
||||
main_sql.append(sql_header + '\n\n' + partition_sql.strip('\n'))
|
||||
main_sql.append(
|
||||
sql_header + '\n\n' + partition_sql.strip('\n')
|
||||
)
|
||||
|
||||
sql = '\n'.join(main_sql)
|
||||
|
||||
@ -1185,9 +1198,10 @@ class BaseTableView(PGChildNodeView):
|
||||
elif isinstance(data[arg], list) and len(data[arg]) < 1:
|
||||
return False
|
||||
|
||||
if 'autoindex' in data and data['autoindex'] and \
|
||||
if 'autoindex' in data and \
|
||||
data['autoindex'] and \
|
||||
('coveringindex' not in data or
|
||||
data['coveringindex'] == ''):
|
||||
data['coveringindex'] == ''):
|
||||
return False
|
||||
|
||||
return True
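Stripped of the line-continuation noise, the validation above amounts to this standalone check (validate_constraint and the sample payloads are illustrative, not the pgAdmin API):

    def validate_constraint(required_args, data):
        for arg in required_args:
            if arg not in data:
                return False
            if isinstance(data[arg], list) and len(data[arg]) < 1:
                return False

        # An automatic index only makes sense with a covering index name.
        if data.get('autoindex') and not data.get('coveringindex'):
            return False

        return True

    print(validate_constraint(['columns'], {'columns': ['id']}))          # True
    print(validate_constraint(['columns'], {'columns': [],
                                            'autoindex': True}))          # False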
|
||||
@ -1265,9 +1279,18 @@ class BaseTableView(PGChildNodeView):
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
properties_sql = render_template("/".join(
|
||||
[self.index_constraint_template_path, 'properties.sql']),
|
||||
did=did, tid=tid, cid=c['oid'], constraint_type=ctype)
|
||||
properties_sql = render_template(
|
||||
"/".join(
|
||||
[
|
||||
self.index_constraint_template_path,
|
||||
'properties.sql'
|
||||
]
|
||||
),
|
||||
did=did,
|
||||
tid=tid,
|
||||
cid=c['oid'],
|
||||
constraint_type=ctype
|
||||
)
|
||||
status, res = self.conn.execute_dict(properties_sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@ -1287,11 +1310,14 @@ class BaseTableView(PGChildNodeView):
|
||||
c['table'] = data['name']
|
||||
|
||||
# Sql to add object
|
||||
if self.validate_constrains(index_constraints[ctype], c):
|
||||
if self.validate_constrains(
|
||||
index_constraints[ctype], c):
|
||||
sql.append(
|
||||
render_template(
|
||||
"/".join([self.index_constraint_template_path,
|
||||
'create.sql']),
|
||||
"/".join(
|
||||
[self.index_constraint_template_path,
|
||||
'create.sql']
|
||||
),
|
||||
data=c, conn=self.conn,
|
||||
constraint_name='PRIMARY KEY'
|
||||
if ctype == 'p' else 'UNIQUE'
|
||||
@ -1300,7 +1326,10 @@ class BaseTableView(PGChildNodeView):
|
||||
else:
|
||||
sql.append(
|
||||
gettext(
|
||||
'-- definition incomplete for {0} constraint'.format(index_constraints[ctype])
|
||||
'-- definition incomplete for {0} '
|
||||
'constraint'.format(
|
||||
index_constraints[ctype]
|
||||
)
|
||||
)
|
||||
)
|
||||
if len(sql) > 0:
|
||||
@ -1361,7 +1390,8 @@ class BaseTableView(PGChildNodeView):
|
||||
if not self.validate_constrains('foreign_key', c):
|
||||
sql.append(
|
||||
gettext(
|
||||
'-- definition incomplete for foreign_key constraint'
|
||||
'-- definition incomplete for foreign_key '
|
||||
'constraint'
|
||||
)
|
||||
)
|
||||
return '\n\n'.join(sql)
|
||||
@ -1373,13 +1403,19 @@ class BaseTableView(PGChildNodeView):
|
||||
|
||||
coveringindex = self.search_coveringindex(tid, cols)
|
||||
|
||||
if coveringindex is None and 'autoindex' in c and c['autoindex'] and \
|
||||
('coveringindex' in c and
|
||||
c['coveringindex'] != ''):
|
||||
if coveringindex is None and \
|
||||
'autoindex' in c and \
|
||||
c['autoindex'] and \
|
||||
('coveringindex' in c and
|
||||
c['coveringindex'] != ''):
|
||||
sql.append(render_template(
|
||||
"/".join([self.foreign_key_template_path, 'create_index.sql']),
|
||||
data=c, conn=self.conn).strip('\n')
|
||||
)
|
||||
"/".join(
|
||||
[
|
||||
self.foreign_key_template_path,
|
||||
'create_index.sql'
|
||||
]
|
||||
), data=c, conn=self.conn).strip('\n')
|
||||
)
|
||||
|
||||
if 'added' in constraint:
|
||||
for c in constraint['added']:
|
||||
@ -1392,14 +1428,17 @@ class BaseTableView(PGChildNodeView):
|
||||
if not self.validate_constrains('foreign_key', c):
|
||||
sql.append(
|
||||
gettext(
|
||||
'-- definition incomplete for foreign_key constraint'
|
||||
'-- definition incomplete for foreign_key '
|
||||
'constraint'
|
||||
)
|
||||
)
|
||||
return '\n\n'.join(sql)
|
||||
|
||||
SQL = render_template("/".join([self.foreign_key_template_path,
|
||||
'get_parent.sql']),
|
||||
tid=c['columns'][0]['references'])
|
||||
SQL = render_template(
|
||||
"/".join(
|
||||
[self.foreign_key_template_path, 'get_parent.sql']
|
||||
), tid=c['columns'][0]['references']
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
@ -1462,9 +1501,12 @@ class BaseTableView(PGChildNodeView):
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
properties_sql = render_template("/".join(
|
||||
[self.check_constraint_template_path, 'properties.sql']),
|
||||
tid=tid, cid=c['oid'])
|
||||
properties_sql = render_template(
|
||||
"/".join(
|
||||
[self.check_constraint_template_path,
|
||||
'properties.sql']
|
||||
), tid=tid, cid=c['oid']
|
||||
)
|
||||
status, res = self.conn.execute_dict(properties_sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
@ -1539,7 +1581,8 @@ class BaseTableView(PGChildNodeView):
|
||||
c['table'] = data['name']
|
||||
|
||||
properties_sql = render_template("/".join(
|
||||
[self.exclusion_constraint_template_path, 'properties.sql']),
|
||||
[self.exclusion_constraint_template_path,
|
||||
'properties.sql']),
|
||||
did=did, tid=tid, cid=c['oid'])
|
||||
status, res = self.conn.execute_dict(properties_sql)
|
||||
if not status:
|
||||
@ -1562,7 +1605,8 @@ class BaseTableView(PGChildNodeView):
|
||||
if not self.validate_constrains('exclude_constraint', c):
|
||||
sql.append(
|
||||
gettext(
|
||||
'-- definition incomplete for exclusion_constraint'
|
||||
'-- definition incomplete for '
|
||||
'exclusion_constraint'
|
||||
)
|
||||
)
|
||||
return '\n\n'.join(sql)
|
||||
@ -1617,26 +1661,32 @@ class BaseTableView(PGChildNodeView):
|
||||
# If table(s) added
|
||||
if c_len > p_len:
|
||||
data['coll_inherits_added'] = list(
|
||||
set(data['coll_inherits']) - set(old_data['coll_inherits'])
|
||||
set(data['coll_inherits']) -
|
||||
set(old_data['coll_inherits'])
|
||||
)
|
||||
# If table(s)removed
|
||||
elif c_len < p_len:
|
||||
data['coll_inherits_removed'] = list(
|
||||
set(old_data['coll_inherits']) - set(data['coll_inherits'])
|
||||
set(old_data['coll_inherits']) -
|
||||
set(data['coll_inherits'])
|
||||
)
|
||||
# Safe side verification, in case it happens.
|
||||
# If user removes and adds same number of table
|
||||
# eg removed one table and added one new table
|
||||
elif c_len == p_len:
|
||||
data['coll_inherits_added'] = list(
|
||||
set(data['coll_inherits']) - set(old_data['coll_inherits'])
|
||||
set(data['coll_inherits']) -
|
||||
set(old_data['coll_inherits'])
|
||||
)
|
||||
data['coll_inherits_removed'] = list(
|
||||
set(old_data['coll_inherits']) - set(data['coll_inherits'])
|
||||
set(old_data['coll_inherits']) -
|
||||
set(data['coll_inherits'])
|
||||
)
|
||||
|
||||
SQL = render_template("/".join([self.table_template_path, 'update.sql']),
|
||||
o_data=old_data, data=data, conn=self.conn)
|
||||
SQL = render_template(
|
||||
"/".join([self.table_template_path, 'update.sql']),
|
||||
o_data=old_data, data=data, conn=self.conn
|
||||
)
|
||||
# Removes trailing new lines
|
||||
SQL = SQL.strip('\n') + '\n\n'
|
||||
|
||||
@ -1670,26 +1720,40 @@ class BaseTableView(PGChildNodeView):
|
||||
c['attacl'] = parse_priv_to_db(c['attacl'],
|
||||
self.column_acl)
|
||||
|
||||
properties_sql = render_template("/".join([self.column_template_path,
|
||||
'properties.sql']),
|
||||
tid=tid,
|
||||
clid=c['attnum'],
|
||||
show_sys_objects=self.blueprint.show_system_objects
|
||||
)
|
||||
properties_sql = render_template(
|
||||
"/".join([self.column_template_path,
|
||||
'properties.sql']),
|
||||
tid=tid,
|
||||
clid=c['attnum'],
|
||||
show_sys_objects=self.blueprint.show_system_objects
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(properties_sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
old_data = res['rows'][0]
|
||||
|
||||
old_data['cltype'], old_data['hasSqrBracket'] = self._cltype_formatter(old_data['cltype'])
|
||||
old_data = BaseTableView.convert_length_precision_to_string(old_data)
|
||||
old_data['cltype'], old_data['hasSqrBracket'] = \
|
||||
self._cltype_formatter(old_data['cltype'])
|
||||
old_data = \
|
||||
BaseTableView.convert_length_precision_to_string(
|
||||
old_data
|
||||
)
|
||||
|
||||
fulltype = self.get_full_type(
|
||||
old_data['typnspname'], old_data['typname'],
|
||||
old_data['isdup'], old_data['attndims'], old_data['atttypmod']
|
||||
old_data['typnspname'],
|
||||
old_data['typname'],
|
||||
old_data['isdup'],
|
||||
old_data['attndims'],
|
||||
old_data['atttypmod']
|
||||
)
|
||||
|
||||
def get_type_attr(key, data):
|
||||
"""Utility function"""
|
||||
if key in data:
|
||||
return data[key]
|
||||
return None
|
||||
|
||||
# If the column data type has not changed then fetch
|
||||
# old length and precision
|
||||
if 'elemoid' in old_data and 'cltype' not in c:
|
||||
@ -1700,13 +1764,19 @@ class BaseTableView(PGChildNodeView):
|
||||
if length and precision:
|
||||
matchObj = re.search(r'(\d+),(\d+)', fulltype)
|
||||
if matchObj:
|
||||
c['attlen'] = ('attlen' in c and c['attlen']) or matchObj.group(1)
|
||||
c['attprecision'] = ('attprecision' in c and c['attprecision']) or matchObj.group(2)
|
||||
c['attlen'] = get_type_attr(
|
||||
'attlen', c
|
||||
) or matchObj.group(1)
|
||||
c['attprecision'] = get_type_attr(
|
||||
'attprecision', c
|
||||
) or matchObj.group(2)
|
||||
elif length:
|
||||
# If we have length only
|
||||
matchObj = re.search(r'(\d+)', fulltype)
|
||||
if matchObj:
|
||||
c['attlen'] = ('attlen' in c and c['attlen']) or matchObj.group(1)
|
||||
c['attlen'] = get_type_attr(
|
||||
'attlen', c
|
||||
) or matchObj.group(1)
|
||||
c['attprecision'] = None
|
||||
else:
|
||||
c['attlen'] = None
|
||||
@ -1783,9 +1853,15 @@ class BaseTableView(PGChildNodeView):
|
||||
temp_data['name'] = table_name
|
||||
|
||||
# Sql for detach partition
|
||||
partitions_sql += render_template("/".join(
|
||||
[self.partition_template_path, 'detach.sql']),
|
||||
data=temp_data, conn=self.conn).strip('\n') + '\n\n'
|
||||
partitions_sql += render_template(
|
||||
"/".join(
|
||||
[
|
||||
self.partition_template_path,
|
||||
'detach.sql'
|
||||
]
|
||||
),
|
||||
data=temp_data,
|
||||
conn=self.conn).strip('\n') + '\n\n'
|
||||
|
||||
# If partition(s) is/are added
|
||||
if 'added' in partitions:
|
||||
@ -1804,7 +1880,8 @@ class BaseTableView(PGChildNodeView):
|
||||
SQL += partitions_sql.strip('\n')
|
||||
|
||||
# Check if index constraints are added/changed/deleted
|
||||
index_constraint_sql = self.get_index_constraint_sql(did, tid, data)
|
||||
index_constraint_sql = self.get_index_constraint_sql(
|
||||
did, tid, data)
|
||||
# If we have index constraint sql then add it in main sql
|
||||
if index_constraint_sql is not None:
|
||||
SQL += '\n' + index_constraint_sql
|
||||
@ -1822,7 +1899,8 @@ class BaseTableView(PGChildNodeView):
|
||||
SQL += '\n' + check_constraint_sql
|
||||
|
||||
# Check if exclusion constraint(s) is/are added/changed/deleted
|
||||
exclusion_constraint_sql = self.get_exclusion_constraint_sql(did, tid, data)
|
||||
exclusion_constraint_sql = self.get_exclusion_constraint_sql(
|
||||
did, tid, data)
|
||||
# If we have exclusion constraint sql then add it in main sql
|
||||
if exclusion_constraint_sql is not None:
|
||||
SQL += '\n' + exclusion_constraint_sql
|
||||
@ -1931,7 +2009,8 @@ class BaseTableView(PGChildNodeView):
|
||||
status, pscid = self.conn.execute_scalar(
|
||||
render_template(
|
||||
"/".join([
|
||||
self.table_template_path, 'get_schema_oid.sql'
|
||||
self.table_template_path,
|
||||
'get_schema_oid.sql'
|
||||
]),
|
||||
tid=row['oid']
|
||||
)
|
||||
@ -1939,7 +2018,9 @@ class BaseTableView(PGChildNodeView):
|
||||
if not status:
|
||||
return internal_server_error(errormsg=pscid)
|
||||
|
||||
detached.append({'oid': row['oid'], 'schema_id': pscid})
|
||||
detached.append(
|
||||
{'oid': row['oid'], 'schema_id': pscid}
|
||||
)
|
||||
partitions_oid['detached'] = detached
|
||||
|
||||
# Fetch oid and schema oid for all created/attached partitions
|
||||
@ -1951,7 +2032,8 @@ class BaseTableView(PGChildNodeView):
|
||||
status, pscid = self.conn.execute_scalar(
|
||||
render_template(
|
||||
"/".join([
|
||||
self.table_template_path, 'get_schema_oid.sql'
|
||||
self.table_template_path,
|
||||
'get_schema_oid.sql'
|
||||
]),
|
||||
tid=row['partition_name']
|
||||
)
|
||||
@ -1980,21 +2062,33 @@ class BaseTableView(PGChildNodeView):
|
||||
|
||||
created.append({
|
||||
'oid': ptid,
|
||||
'schema_id': scid
|
||||
'schema_id': scid
|
||||
})
|
||||
|
||||
partitions_oid['created'] = created
|
||||
partitions_oid['attached'] = attached
|
||||
|
||||
if self.node_type == 'partition':
|
||||
icon = "icon-partition"
|
||||
elif 'is_partitioned' in res['rows'][0] and \
|
||||
res['rows'][0]['is_partitioned']:
|
||||
icon = "icon-partition"
|
||||
else:
|
||||
icon = "icon-table"
|
||||
|
||||
if 'relkind' in res['rows'][0] and \
|
||||
res['rows'][0]['relkind'] == 'p':
|
||||
is_partitioned = True
|
||||
else:
|
||||
is_partitioned = False
|
||||
|
||||
return jsonify(
|
||||
node=self.blueprint.generate_browser_node(
|
||||
tid,
|
||||
parent_id,
|
||||
name,
|
||||
icon="icon-partition" if (
|
||||
'is_partitioned' in res['rows'][0] and res['rows'][0]['is_partitioned']
|
||||
) or self.node_type == 'partition' else "icon-table",
|
||||
is_partitioned=True if 'relkind' in res['rows'][0] and res['rows'][0]['relkind'] == 'p' else False,
|
||||
icon=icon,
|
||||
is_partitioned=is_partitioned,
|
||||
parent_schema_id=scid,
|
||||
schema_id=rest['rows'][0]['scid'],
|
||||
schema_name=rest['rows'][0]['nspname'],
|
||||
@ -2043,7 +2137,8 @@ class BaseTableView(PGChildNodeView):
|
||||
# Fetch partition of this table if it is partitioned table.
|
||||
if 'is_partitioned' in data and data['is_partitioned']:
|
||||
# get the partition type
|
||||
data['partition_type'] = data['partition_scheme'].split()[0].lower()
|
||||
data['partition_type'] = \
|
||||
data['partition_scheme'].split()[0].lower()
|
||||
|
||||
partitions = []
|
||||
SQL = render_template("/".join([self.partition_template_path,
|
||||
@ -2062,7 +2157,7 @@ class BaseTableView(PGChildNodeView):
|
||||
|
||||
if data['partition_type'] == 'range':
|
||||
range_part = row['partition_value'].split(
|
||||
'FOR VALUES FROM (')[1].split(') TO')
|
||||
'FOR VALUES FROM (')[1].split(') TO')
|
||||
range_from = range_part[0]
|
||||
range_to = range_part[1][2:-1]
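The slicing in the range branch above is easier to follow with concrete input; a standalone example (the partition bounds are invented):

    partition_value = "FOR VALUES FROM ('2010-01-01') TO ('2010-12-31')"

    range_part = partition_value.split('FOR VALUES FROM (')[1].split(') TO')
    range_from = range_part[0]       # "'2010-01-01'"
    range_to = range_part[1][2:-1]   # "'2010-12-31'"
    print(range_from, range_to)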
|
||||
|
||||
|
@ -55,7 +55,9 @@ class JobModule(CollectionNodeModule):
|
||||
|
||||
status, res = conn.execute_scalar("""
|
||||
SELECT
|
||||
has_table_privilege('pgagent.pga_job', 'INSERT, SELECT, UPDATE') has_priviledge
|
||||
has_table_privilege(
|
||||
'pgagent.pga_job', 'INSERT, SELECT, UPDATE'
|
||||
) has_priviledge
|
||||
WHERE EXISTS(
|
||||
SELECT has_schema_privilege('pgagent', 'USAGE')
|
||||
WHERE EXISTS(
|
||||
@ -148,7 +150,11 @@ class JobView(PGChildNodeView):
|
||||
@wraps(f)
|
||||
def wrap(self, *args, **kwargs):
|
||||
|
||||
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(kwargs['sid'])
|
||||
self.manager = get_driver(
|
||||
PG_DEFAULT_DRIVER
|
||||
).connection_manager(
|
||||
kwargs['sid']
|
||||
)
|
||||
self.conn = self.manager.connection()
|
||||
|
||||
# Set the template path for the sql scripts.
|
||||
@ -182,9 +188,8 @@ SELECT EXISTS(
|
||||
if jid is not None:
|
||||
if len(rset['rows']) != 1:
|
||||
return gone(
|
||||
errormsg=_(
|
||||
"Could not find the pgAgent job on the server."
|
||||
))
|
||||
errormsg=_("Could not find the pgAgent job on the server.")
|
||||
)
|
||||
return make_json_response(
|
||||
data=self.blueprint.generate_browser_node(
|
||||
rset['rows'][0]['jobid'],
|
||||
@ -356,8 +361,8 @@ SELECT EXISTS(
|
||||
jid,
|
||||
sid,
|
||||
row['jobname'],
|
||||
icon="icon-pga_job" if row['jobenabled'] else
|
||||
"icon-pga_job-disabled"
|
||||
icon="icon-pga_job" if row['jobenabled']
|
||||
else "icon-pga_job-disabled"
|
||||
)
|
||||
)
|
||||
|
||||
@ -438,7 +443,6 @@ SELECT EXISTS(
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
_("Could not find the object on the server.")
|
||||
@ -446,7 +450,7 @@ SELECT EXISTS(
|
||||
|
||||
row = res['rows'][0]
|
||||
|
||||
status, res= self.conn.execute_dict(
|
||||
status, res = self.conn.execute_dict(
|
||||
render_template(
|
||||
"/".join([self.template_path, 'steps.sql']),
|
||||
jid=jid, conn=self.conn,
|
||||
@ -478,7 +482,7 @@ SELECT EXISTS(
|
||||
'jexdate': schedule['jexdate'][idx],
|
||||
'jextime': schedule['jextime'][idx]
|
||||
})
|
||||
idx+=1
|
||||
idx += 1
|
||||
del schedule['jexid']
|
||||
del schedule['jexdate']
|
||||
del schedule['jextime']
|
||||
@ -527,4 +531,5 @@ SELECT EXISTS(
|
||||
status=200
|
||||
)
|
||||
|
||||
|
||||
JobView.register_node_view(blueprint)
|
||||
|
@ -391,7 +391,6 @@ class JobScheduleView(PGChildNodeView):
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@check_precondition
|
||||
def update(self, gid, sid, jid, jscid):
|
||||
"""
|
||||
|
@ -464,18 +464,16 @@ define('pgadmin.node.pga_schedule', [
|
||||
this.errorModel.unset('jscstart');
|
||||
}
|
||||
|
||||
val = this.get('jscend');
|
||||
if (_.isUndefined(val) || _.isNull(val) ||
|
||||
String(val).replace(/^\s+|\s+$/g, '') == '') {
|
||||
msg = gettext('Please enter the end time.');
|
||||
this.errorModel.set('jscend', msg);
|
||||
errMsg = errMsg || msg;
|
||||
} else {
|
||||
this.errorModel.unset('jscend');
|
||||
}
|
||||
|
||||
// End time must be greater than Start time
|
||||
if(!errMsg) {
|
||||
val = this.get('jscend');
|
||||
// No further validation required if end date is not provided by
|
||||
// the user
|
||||
if (_.isUndefined(val) || _.isNull(val) ||
|
||||
String(val).replace(/^\s+|\s+$/g, '') == '') {
|
||||
return;
|
||||
}
|
||||
|
||||
var start_time = this.get('jscstart'),
|
||||
end_time = this.get('jscend'),
|
||||
start_time_js = start_time.split(' '),
|
||||
|
@ -487,7 +487,6 @@ SELECT EXISTS(
|
||||
|
||||
return make_json_response(success=1)
|
||||
|
||||
|
||||
@check_precondition
|
||||
def msql(self, gid, sid, jid, jstid=None):
|
||||
"""
|
||||
|