1) Ensure re_sql tests do not abort at the first failure; they should run all the test cases.

2) Added a placeholder for the owner, so we won't need to create separate pg/ppas folders just because of a change in the owner name.
This commit is contained in:
Akshay Joshi
2019-07-09 15:37:53 +05:30
parent f4ac1e804e
commit 1a9d8f01aa
20 changed files with 170 additions and 30 deletions
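Taken together, the two changes boil down to one small pattern in the RE-SQL test runner: expected SQL files carry an <OWNER> placeholder that is substituted with the username used for the connection, and per-scenario failures are recorded and printed instead of raised, so the remaining scenarios still run and the overall status is asserted once at the end. A minimal standalone sketch of that pattern (hypothetical names and demo data, not the actual pgAdmin ReverseEngineeredSQLTestCases code):

# Minimal sketch of the commit's pattern (hypothetical names, not the
# actual pgAdmin test implementation).

def sql_matches(expected_sql, actual_sql, username):
    # Substitute the <OWNER> placeholder with the username used to connect,
    # so a single expected file works for any owner (pg or ppas).
    return expected_sql.replace('<OWNER>', username) == actual_sql


def run_all_scenarios(scenarios, username):
    # Record failures instead of aborting, so every scenario still runs;
    # the caller asserts the overall status once at the end.
    final_status = True
    for name, expected_sql, actual_sql in scenarios:
        if sql_matches(expected_sql, actual_sql, username):
            print(name + " ..............OK")
        else:
            final_status = False
            print(name + " ..............FAIL")
    return final_status


if __name__ == '__main__':
    demo = [('FDW owner',
             'ALTER FOREIGN DATA WRAPPER fdw1 OWNER TO <OWNER>;',
             'ALTER FOREIGN DATA WRAPPER fdw1 OWNER TO postgres;')]
    assert run_all_scenarios(demo, 'postgres')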

View File

@@ -6,7 +6,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1', opt2 'valopt2');
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -5,7 +5,7 @@
CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#";
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -6,7 +6,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1');
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -6,7 +6,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1', opt2 'val2');
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -7,7 +7,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw2_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1');
ALTER FOREIGN DATA WRAPPER "Fdw2_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw2_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -7,7 +7,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1');
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -5,4 +5,4 @@
CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#";
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;

View File

@@ -7,7 +7,6 @@
            "sql_endpoint": "NODE-foreign_data_wrapper.sql_id",
            "data": {
                "name": "Fdw1_$%{}[]()&*^!@\"'`\\/#",
-               "fdwowner": "postgres",
                "fdwacl": [],
                "fdwoptions": []
            },

View File

@@ -6,7 +6,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1', opt2 'valopt2');
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -5,7 +5,7 @@
CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#";
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -6,7 +6,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1');
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -6,7 +6,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1', opt2 'val2');
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -7,7 +7,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1');
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -7,7 +7,7 @@ CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
OPTIONS (opt1 'val1');
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;
COMMENT ON FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
IS 'a comment';

View File

@@ -5,4 +5,4 @@
CREATE FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#";
ALTER FOREIGN DATA WRAPPER "Fdw1_$%{}[]()&*^!@""'`\/#"
-OWNER TO postgres;
+OWNER TO <OWNER>;

View File

@@ -7,7 +7,6 @@
            "sql_endpoint": "NODE-foreign_data_wrapper.sql_id",
            "data": {
                "name": "Fdw1_$%{}[]()&*^!@\"'`\\/#",
-               "fdwowner": "postgres",
                "fdwacl": [],
                "fdwoptions": []
            },

View File

@@ -0,0 +1,11 @@
-- Language: Lan1_$%{}[]()&*^!@"'`\/#
-- DROP LANGUAGE "Lan1_$%{}[]()&*^!@""'`\/#"
CREATE TRUSTED PROCEDURAL LANGUAGE "Lan1_$%{}[]()&*^!@""'`\/#"
HANDLER plpgsql_call_handler
INLINE plpgsql_inline_handler
VALIDATOR plpgsql_validator;
ALTER LANGUAGE "Lan1_$%{}[]()&*^!@""'`\/#"
OWNER TO <OWNER>;

View File

@@ -0,0 +1,11 @@
-- Language: Lan1_$%{}[]()&*^!@"'`\/#
-- DROP LANGUAGE "Lan1_$%{}[]()&*^!@""'`\/#"
CREATE TRUSTED PROCEDURAL LANGUAGE "Lan1_$%{}[]()&*^!@""'`\/#"
HANDLER plpgsql_call_handler
INLINE plpgsql_inline_handler
VALIDATOR plpgsql_validator;
ALTER LANGUAGE "Lan1_$%{}[]()&*^!@""'`\/#"
OWNER TO <OWNER>;

View File

@@ -0,0 +1,43 @@
{
    "scenarios": [
        {
            "type": "create",
            "name": "Create Resource groups",
            "endpoint": "NODE-resource_group.obj",
            "sql_endpoint": "NODE-resource_group.sql_id",
            "data": {
                "name": "Rg1_$%{}[]()&*^!@\"'`\\/#",
                "cpu_rate_limit": 0,
                "dirty_rate_limit": 0
            },
            "expected_sql_file": "create_resource_group.sql"
        },
        {
            "type": "alter",
            "name": "Alter Resource groups name",
            "endpoint": "NODE-resource_group.obj_id",
            "sql_endpoint": "NODE-resource_group.sql_id",
            "data": {
                "name": "Rg2_$%{}[]()&*^!@\"'`\\/#"
            },
            "expected_sql_file": "alter_resource_group_name.sql"
        },
        {
            "type": "alter",
            "name": "Alter Resource groups options",
            "endpoint": "NODE-resource_group.obj_id",
            "sql_endpoint": "NODE-resource_group.sql_id",
            "data": {
                "cpu_rate_limit": 1,
                "dirty_rate_limit": 5
            },
            "expected_sql_file": "alter_resource_group_options.sql"
        },
        {
            "type": "delete",
            "name": "Drop Resource groups",
            "endpoint": "NODE-resource_group.obj_id",
            "data": {}
        }
    ]
}

View File

@@ -9,7 +9,7 @@
from __future__ import print_function
import json
import os
import sys
import traceback
from flask import url_for
import regression
from regression import parent_node_dict
@@ -52,6 +52,10 @@ class ReverseEngineeredSQLTestCases(BaseTestGenerator):
        ('Reverse Engineered SQL Test Cases', dict())
    ]

    @classmethod
    def setUpClass(cls):
        cls.maxDiff = None

    def setUp(self):
        # Get the database connection
        self.db_con = database_utils.connect_database(
@@ -65,6 +69,8 @@ class ReverseEngineeredSQLTestCases(BaseTestGenerator):
        # Get the application path
        self.apppath = os.getcwd()

        # Status of the test case
        self.final_test_status = True

    def runTest(self):
        """ Create the module list on which reverse engineering sql test
@@ -72,9 +78,9 @@ class ReverseEngineeredSQLTestCases(BaseTestGenerator):
        # Schema ID placeholder in JSON file which needs to be replaced
        # while running the test cases
-       self.JSON_PLACEHOLDERS = {'schema_id': '<SCHEMA_ID>'}
+       self.JSON_PLACEHOLDERS = {'schema_id': '<SCHEMA_ID>',
+                                 'owner': '<OWNER>'}

        server_info = self.server_information
        resql_module_list = create_resql_module_list(
            BaseTestGenerator.re_sql_module_list,
            BaseTestGenerator.exclude_pkgs)
@@ -98,6 +104,9 @@ class ReverseEngineeredSQLTestCases(BaseTestGenerator):
                    for key, scenarios in data.items():
                        self.execute_test_case(scenarios)

        # Check the final status of the test case
        self.assertEqual(self.final_test_status, True)

    def tearDown(self):
        database_utils.disconnect_database(
            self, self.server_information['server_id'],
@@ -160,8 +169,6 @@ class ReverseEngineeredSQLTestCases(BaseTestGenerator):
        print("\n")
        for scenario in scenarios:
-           print(scenario['name'])

            if 'type' in scenario and scenario['type'] == 'create':
                # Get the url and create the specific node.
@@ -193,30 +200,66 @@ class ReverseEngineeredSQLTestCases(BaseTestGenerator):
                response = self.tester.post(create_url,
                                            data=json.dumps(scenario['data']),
                                            content_type='html/json')
                try:
                    self.assertEquals(response.status_code, 200)
                except Exception as e:
                    self.final_test_status = False
                    print(scenario['name'] + "..............FAIL")
                    traceback.print_exc()
                    continue

                resp_data = json.loads(response.data.decode('utf8'))
                object_id = resp_data['node']['_id']

                # Compare the reverse engineering SQL
-               self.check_re_sql(scenario, object_id)
+               if not self.check_re_sql(scenario, object_id):
                    print_msg = scenario['name']
                    if 'expected_sql_file' in scenario:
                        print_msg = print_msg + " Expected SQL File:" + \
                                    scenario['expected_sql_file']
                    print_msg = print_msg + " ..............FAIL"
                    print(print_msg)
                    continue

            elif 'type' in scenario and scenario['type'] == 'alter':
                # Get the url and create the specific node.
                alter_url = self.get_url(scenario['endpoint'], object_id)
                response = self.tester.put(alter_url,
                                           data=json.dumps(scenario['data']),
                                           follow_redirects=True)
                try:
                    self.assertEquals(response.status_code, 200)
                except Exception as e:
                    self.final_test_status = False
                    print(scenario['name'] + "..............FAIL")
                    traceback.print_exc()
                    continue

                resp_data = json.loads(response.data.decode('utf8'))
                object_id = resp_data['node']['_id']

                # Compare the reverse engineering SQL
-               self.check_re_sql(scenario, object_id)
+               if not self.check_re_sql(scenario, object_id):
                    print_msg = scenario['name']
                    if 'expected_sql_file' in scenario:
                        print_msg = print_msg + " Expected SQL File:" + \
                                    scenario['expected_sql_file']
                    print_msg = print_msg + " ..............FAIL"
                    print(print_msg)
                    continue

            elif 'type' in scenario and scenario['type'] == 'delete':
                # Get the delete url and delete the object created above.
                delete_url = self.get_url(scenario['endpoint'], object_id)
                delete_response = self.tester.delete(delete_url,
                                                     follow_redirects=True)
                try:
                    self.assertEquals(delete_response.status_code, 200)
                except Exception as e:
                    self.final_test_status = False
                    print(scenario['name'] + "..............FAIL")
                    traceback.print_exc()
                    continue

            print(scenario['name'] + "..............OK")

    def get_test_folder(self, module_path):
        """
@@ -260,7 +303,13 @@ class ReverseEngineeredSQLTestCases(BaseTestGenerator):
"""
        sql_url = self.get_url(scenario['sql_endpoint'], object_id)
        response = self.tester.get(sql_url)
        try:
            self.assertEquals(response.status_code, 200)
        except Exception as e:
            self.final_test_status = False
            traceback.print_exc()
            return False

        resp_sql = response.data.decode('unicode_escape')

        # Remove first and last double quotes
@@ -278,8 +327,36 @@ class ReverseEngineeredSQLTestCases(BaseTestGenerator):
                fp = open(output_file, "r")
                # Used rstrip to remove trailing \n
                sql = fp.read().rstrip()

                # Replace place holder <owner> with the current username
                # used to connect to the database
                if 'username' in self.server:
                    sql = sql.replace(self.JSON_PLACEHOLDERS['owner'],
                                      self.server['username'])

                try:
                    self.assertEquals(sql, resp_sql)
                except Exception as e:
                    self.final_test_status = False
                    traceback.print_exc()
                    return False
            else:
                try:
                    self.assertFalse("Expected SQL File not found")
                except Exception as e:
                    self.final_test_status = False
                    traceback.print_exc()
                    return False
        elif 'expected_sql' in scenario:
-           self.assertEquals(scenario['expected_sql'], resp_sql)
+           exp_sql = scenario['expected_sql']

            # Replace place holder <owner> with the current username
            # used to connect to the database
            if 'username' in self.server:
                exp_sql = exp_sql.replace(self.JSON_PLACEHOLDERS['owner'],
                                          self.server['username'])

            try:
                self.assertEquals(exp_sql, resp_sql)
            except Exception as e:
                self.final_test_status = False
                traceback.print_exc()
                return False

        return True