Fix SQL generated when dropping NOT NULL from a "char" column. Fixes #2989

This commit is contained in:
Murtuza Zabuawala 2018-03-09 15:23:49 +00:00 committed by Dave Page
parent 83477224cb
commit 985a004766
6 changed files with 177 additions and 61 deletions

View File

@ -364,20 +364,9 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
data['attlen'] = None
data['attprecision'] = None
import re
# If we have length & precision both
if length and precision:
matchObj = re.search(r'(\d+),(\d+)', fulltype)
if matchObj:
data['attlen'] = matchObj.group(1)
data['attprecision'] = matchObj.group(2)
elif length:
# If we have length only
matchObj = re.search(r'(\d+)', fulltype)
if matchObj:
data['attlen'] = matchObj.group(1)
data['attprecision'] = None
self.set_length_precision(
length, precision, fulltype, data
)
# We need to fetch inherited tables for each table
SQL = render_template("/".join([self.template_path,

View File

@ -0,0 +1,96 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
import uuid
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as tables_utils
class TableNotNullUpdateTestCase(BaseTestGenerator):
    """Regression test for #2989: drop the NOT NULL constraint from table
    columns (including a quoted "char" column) via the table PUT endpoint
    and verify the update succeeds."""

    scenarios = [
        ('Update Table with not null field', dict(url='/browser/table/obj/')),
    ]

    def setUp(self):
        # Use the most recently created database/schema from the shared
        # regression fixtures.
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(
            self, utils.SERVER_GROUP, self.server_id, self.db_id
        )
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")
        self.table_name = "test_table_column_put_%s" % (str(uuid.uuid4())[1:8])
        # One quoted "char" column (the type that triggered #2989) and one
        # varchar column, both NOT NULL; runTest drops NOT NULL from both.
        custom_sql = 'column_1 "char" NOT NULL, ' \
                     'column_2 character varying(10) NOT NULL'
        self.table_id = tables_utils.create_table(
            self.server,
            self.db_name,
            self.schema_name,
            self.table_name,
            custom_sql
        )

    def runTest(self):
        """Drop NOT NULL from both columns and expect an HTTP 200 reply."""
        table_response = tables_utils.verify_table(
            self.server, self.db_name, self.table_id
        )
        if not table_response:
            raise Exception("Could not find the table to update.")
        data = {
            "id": self.table_id,
            "columns": {
                "changed": [
                    {
                        "attnum": 1,
                        "attnotnull": False
                    },
                    {
                        "attnum": 2,
                        "attnotnull": False
                    }
                ]
            }
        }
        response = self.tester.put(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(self.db_id) + '/' +
            str(self.schema_id) + '/' + str(self.table_id),
            data=json.dumps(data), follow_redirects=True
        )
        # assertEqual, not the deprecated assertEquals alias.
        self.assertEqual(response.status_code, 200)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)

View File

@ -15,7 +15,8 @@ import traceback
from regression.python_test_utils import test_utils as utils
def create_table(server, db_name, schema_name, table_name):
def create_table(server, db_name, schema_name, table_name,
custom_column_sql=None):
"""
This function creates a table under provided schema.
:param server: server details
@ -39,9 +40,13 @@ def create_table(server, db_name, schema_name, table_name):
old_isolation_level = connection.isolation_level
connection.set_isolation_level(0)
pg_cursor = connection.cursor()
query = "CREATE TABLE %s.%s(id serial UNIQUE NOT NULL, name text," \
" location text)" %\
(schema_name, table_name)
if custom_column_sql:
query = "CREATE TABLE %s.%s(%s)" % \
(schema_name, table_name, custom_column_sql)
else:
query = "CREATE TABLE %s.%s(id serial UNIQUE NOT NULL, " \
"name text, location text)" % \
(schema_name, table_name)
pg_cursor.execute(query)
connection.set_isolation_level(old_isolation_level)
connection.commit()

View File

@ -1748,39 +1748,20 @@ class BaseTableView(PGChildNodeView):
old_data['atttypmod']
)
def get_type_attr(key, data):
"""Utility function"""
if key in data:
return data[key]
return None
# If the column data type has not changed then fetch
# old length and precision
if 'elemoid' in old_data and 'cltype' not in c:
length, precision, typeval = \
self.get_length_precision(old_data['elemoid'])
# Set proper values for old data
self.set_length_precision(
length, precision, fulltype, old_data
)
# If we have length & precision both
if length and precision:
matchObj = re.search(r'(\d+),(\d+)', fulltype)
if matchObj:
c['attlen'] = get_type_attr(
'attlen', c
) or matchObj.group(1)
c['attprecision'] = get_type_attr(
'attprecision', c
) or matchObj.group(2)
elif length:
# If we have length only
matchObj = re.search(r'(\d+)', fulltype)
if matchObj:
c['attlen'] = get_type_attr(
'attlen', c
) or matchObj.group(1)
c['attprecision'] = None
else:
c['attlen'] = None
c['attprecision'] = None
# Set proper values for in new data
self.set_length_precision(
length, precision, fulltype, c, old_data
)
if 'cltype' in c:
typename = c['cltype']

View File

@ -464,22 +464,17 @@ class TypeView(PGChildNodeView, DataTypeReader):
# Below logic will allow us to split length, precision from
# type name for grid
import re
t_len = None
t_prec = None
data = {
'attlen': None,
'attprecision': None
}
# If we have length & precision both
if is_tlength and is_precision:
matchObj = re.search(r'(\d+),(\d+)', row['fulltype'])
if matchObj:
t_len = matchObj.group(1)
t_prec = matchObj.group(2)
elif is_tlength:
# If we have length only
matchObj = re.search(r'(\d+)', row['fulltype'])
if matchObj:
t_len = matchObj.group(1)
t_prec = None
self.set_length_precision(
is_tlength, is_precision, row['fulltype'], data
)
t_len = data['attlen']
t_prec = data['attprecision']
type_name = DataTypeReader.parse_type_name(row['typname'])

View File

@ -8,7 +8,7 @@
##########################################################################
"""Schema collection node helper class"""
import re
import json
from flask import render_template
@ -336,6 +336,56 @@ class DataTypeReader:
return type_name
@classmethod
def set_length_precision(cls, length, precision, fulltype, data,
                         old_data=None):
    """
    Parse length & precision out of *fulltype* and store them on *data*
    in the client format. *data* is updated in place.

    Args:
        length: Boolean flag - True when the type supports a length
        precision: Boolean flag - True when the type supports a precision
        fulltype: Formatted type name carrying length & precision,
            e.g. "numeric(10,5)"
        data: New values (dict) - receives 'attlen'/'attprecision'
        old_data: Old values (dict) - fallback used when the type takes
            neither length nor precision
    """
    if length and precision:
        # Type carries both, e.g. "numeric(10,5)"
        match_obj = re.search(r'(\d+),(\d+)', fulltype)
        if match_obj:
            # Prefer a valid value already present on data; -1 and empty
            # values mean "not set" and fall back to the parsed value.
            attribute_length = cls.get_valid_length_value(
                data.get('attlen'))
            data['attlen'] = attribute_length or match_obj.group(1)
            attribute_precision = cls.get_valid_length_value(
                data.get('attprecision'))
            data['attprecision'] = \
                attribute_precision or match_obj.group(2)
    elif length:
        # Type carries length only, e.g. "character varying(30)"
        match_obj = re.search(r'(\d+)', fulltype)
        if match_obj:
            attribute_length = cls.get_valid_length_value(
                data.get('attlen'))
            data['attlen'] = attribute_length or match_obj.group(1)
            data['attprecision'] = None
    else:
        # Type takes neither (e.g. "char"): reuse the old values so no
        # spurious length/precision ends up in the generated SQL (#2989).
        # '-1' is the "not set" sentinel and is never copied over.
        if old_data:
            if 'attlen' in old_data:
                if old_data['attlen'] != '-1':
                    data['attlen'] = old_data['attlen']
            if 'attprecision' in old_data:
                if old_data['attprecision'] != '-1':
                    data['attprecision'] = old_data['attprecision']
@classmethod
def get_valid_length_value(cls, val):
    """Return *val* when it is a usable length/precision value.

    Falsy values (None, '', 0) and the sentinel -1 - in either string or
    numeric form - mean "not set" and yield None instead.
    """
    if not val:
        return None
    return None if int(val) == -1 else val
def trigger_definition(data):
"""